#!/usr/bin/env python3

import argparse
import sys
import time
from inspect import isclass
from operator import itemgetter
from itertools import chain

import h5py

from artiq.language.db import *
from artiq.protocols import pyon
from artiq.protocols.file_db import FlatFileDB
from artiq.master.worker_db import DBHub, ResultDB
from artiq.tools import file_import, verbosity_args, init_logger


class ELFRunner(AutoDB):
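    """Runs a precompiled ELF kernel binary on the core device.

    The ``comm`` device comes from the ``DBKeys`` declaration below; it is
    assumed to provide the ``load``, ``run`` and ``serve`` methods used in
    ``run()``.
    """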
    class DBKeys:
        comm = Device()

    def run(self, filename):
        with open(filename, "rb") as f:
            binary = f.read()
        self.comm.load(binary)
        self.comm.run("run")
        self.comm.serve(dict(), dict())


class SimpleParamLogger:
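    """Parameter database hook that prints every parameter change."""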
    def set(self, timestamp, name, value):
        print("Parameter change: {} -> {}".format(name, value))


class DummyScheduler:
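    """Minimal stand-in for the master scheduler: instead of actually queuing
    or timing experiments, it only prints what would have been scheduled and
    hands out sequential RID/TRID numbers."""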
    def __init__(self):
        self.next_rid = 0
        self.next_trid = 0

    def run_queued(self, run_params):
        rid = self.next_rid
        self.next_rid += 1
        print("Queuing: {}, RID={}".format(run_params, rid))
        return rid

    def cancel_queued(self, rid):
        print("Cancelling RID {}".format(rid))

    def run_timed(self, run_params, next_run):
        trid = self.next_trid
        self.next_trid += 1
        next_run_s = time.strftime("%m/%d %H:%M:%S", time.localtime(next_run))
        print("Timing: {} at {}, TRID={}".format(run_params, next_run_s, trid))
        return trid

    def cancel_timed(self, trid):
        print("Cancelling TRID {}".format(trid))


def get_argparser():
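    """Creates the command-line parser for the local experiment running tool.

    Illustrative invocation (file and argument names are hypothetical):

        ./run_tool.py -o results.h5 experiment.py amplitude=0.5
    """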
    parser = argparse.ArgumentParser(
        description="Local experiment running tool")

    verbosity_args(parser)
    parser.add_argument("-d", "--ddb", default="ddb.pyon",
                        help="device database file")
    parser.add_argument("-p", "--pdb", default="pdb.pyon",
                        help="parameter database file")

    parser.add_argument("-e", "--elf", default=False, action="store_true",
                        help="run ELF binary")
    parser.add_argument("-u", "--unit", default=None,
                        help="unit to run")
    parser.add_argument("-o", "--hdf5", default=None,
                        help="write results to specified HDF5 file"
                             " (default: print them)")
    parser.add_argument("file",
                        help="file containing the unit to run")
    parser.add_argument("arguments", nargs="*",
                        help="run arguments")

    return parser


def _parse_arguments(arguments):
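    """Parses ``name=value`` strings into a dict, decoding each value as PYON.

    For instance (hypothetical argument names), ``["amplitude=0.5",
    "npoints=100"]`` becomes ``{"amplitude": 0.5, "npoints": 100}``.
    """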
    d = {}
    for argument in arguments:
        name, value = argument.split("=", maxsplit=1)
        d[name] = pyon.decode(value)
    return d


def main():
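    """Entry point: loads the flat-file device and parameter databases, runs
    the selected unit (or an ELF binary when ``--elf`` is given), then stores
    results to HDF5 or prints them."""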
    args = get_argparser().parse_args()
    init_logger(args)

    ddb = FlatFileDB(args.ddb)
    pdb = FlatFileDB(args.pdb)
    pdb.hooks.append(SimpleParamLogger())
    rdb = ResultDB(set())
    dbh = DBHub(ddb, pdb, rdb)
    try:
        if args.elf:
            if args.arguments:
                print("Run arguments are not supported in ELF mode")
                sys.exit(1)
            unit_inst = ELFRunner(dbh)
            unit_inst.run(args.file)
        else:
            module = file_import(args.file)
            if args.unit is None:
                units = [(k, v) for k, v in module.__dict__.items()
                         if isclass(v) and hasattr(v, "__artiq_unit__")]
                l = len(units)
                if l == 0:
                    print("No units found in module")
                    sys.exit(1)
                elif l > 1:
                    print("More than one unit found in module:")
                    for k, v in sorted(units, key=itemgetter(0)):
                        print("    {} ({})".format(k, v.__artiq_unit__))
                    print("Use -u to specify which unit to use.")
                    sys.exit(1)
                else:
                    unit = units[0][1]
            else:
                unit = getattr(module, args.unit)

            try:
                arguments = _parse_arguments(args.arguments)
            except Exception:
                print("Failed to parse run arguments")
                sys.exit(1)

            run_params = {
                "file": args.file,
                "unit": args.unit,
                "timeout": None,
                "arguments": arguments
            }
            unit_inst = unit(dbh,
                             scheduler=DummyScheduler(),
                             run_params=run_params,
                             **run_params["arguments"])
            unit_inst.run()
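            # Optional analysis phase, then either dump results to HDF5
            # (when -o/--hdf5 is given) or print them.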
            if hasattr(unit_inst, "analyze"):
                unit_inst.analyze()

            if args.hdf5 is not None:
                f = h5py.File(args.hdf5, "w")
                try:
                    rdb.write_hdf5(f)
                finally:
                    f.close()
            else:
                if rdb.data.read or rdb.realtime_data.read:
                    print("Results:")
                    for k, v in sorted(chain(rdb.realtime_data.read.items(),
                                             rdb.data.read.items()),
                                       key=itemgetter(0)):
                        print("{}: {}".format(k, v))
    finally:
        dbh.close()


if __name__ == "__main__":
    main()