#!/usr/bin/env python3
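# Local experiment running tool: runs a single experiment, either a Python
# module or a precompiled ELF kernel, against flat-file device and parameter
# databases, then prints the results or writes them to an HDF5 file.
#
# Example invocation (the command name "artiq_run" is an assumption; adjust to
# however this script is actually installed):
#   artiq_run -d ddb.pyon -p pdb.pyon -o results.h5 my_experiment.py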

import argparse
import sys
import time
from operator import itemgetter
from itertools import chain
import logging

import h5py

from artiq.language.db import *
from artiq.language.experiment import Experiment
from artiq.protocols.file_db import FlatFileDB
from artiq.master.worker_db import DBHub, ResultDB
from artiq.tools import *


logger = logging.getLogger(__name__)


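# Runs a precompiled ELF kernel as an experiment: the binary is loaded onto
# the core device, started, and its requests are served until it finishes.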
class ELFRunner(Experiment, AutoDB):
    class DBKeys:
        core = Device()
        file = Argument()

    def run(self):
        with open(self.file, "rb") as f:
            self.core.comm.load(f.read())
        self.core.comm.run("run", False)
        self.core.comm.serve(dict(), dict())


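# Parameter hook that simply logs parameter changes instead of storing them.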
class SimpleParamLogger:
    def set(self, timestamp, name, value):
        logger.info("Parameter change: {} = {}".format(name, value))


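# Stand-in for the master's scheduler: submit() and delete() only log their
# calls, so experiments that interact with a scheduler can still run locally.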
class DummyScheduler:
    def __init__(self, expid):
        self.next_rid = 0
        self.next_trid = 0
        self.pipeline_name = "main"
        self.priority = 0
        self.expid = expid

    def submit(self, pipeline_name, expid, priority, due_date, flush):
        rid = self.next_rid
        self.next_rid += 1
        logger.info("Submitting: %s, RID=%s", expid, rid)
        return rid

    def delete(self, rid):
        logger.info("Deleting RID %s", rid)


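# Command-line interface. with_file=False covers the case where an experiment
# module is executed directly and calls run() itself (see _build_experiment),
# so no "file" positional argument is added.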
def get_argparser(with_file=True):
    parser = argparse.ArgumentParser(
        description="Local experiment running tool")

    verbosity_args(parser)
    parser.add_argument("-d", "--ddb", default="ddb.pyon",
                        help="device database file")
    parser.add_argument("-p", "--pdb", default="pdb.pyon",
                        help="parameter database file")

    parser.add_argument("-e", "--experiment", default=None,
                        help="experiment to run")
    parser.add_argument("-o", "--hdf5", default=None,
                        help="write results to specified HDF5 file"
                             " (default: print them)")
    if with_file:
        parser.add_argument("file",
                            help="file containing the experiment to run")
    parser.add_argument("arguments", nargs="*",
                        help="run arguments")

    return parser


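# Instantiates the experiment selected by the command-line arguments: ELF
# kernels are wrapped in ELFRunner, Python files are imported with
# file_import(), and a module run directly falls back to
# sys.modules["__main__"]. The expid dict records what was run and with
# which arguments.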
def _build_experiment(dbh, args):
    if hasattr(args, "file"):
        if args.file.endswith(".elf"):
            if args.arguments:
                raise ValueError("arguments not supported for ELF kernels")
            if args.experiment:
                raise ValueError("experiment-by-name not supported "
                                 "for ELF kernels")
            return ELFRunner(dbh, file=args.file)
        else:
            module = file_import(args.file)
        file = args.file
    else:
        module = sys.modules["__main__"]
        file = getattr(module, "__file__")
    exp = get_experiment(module, args.experiment)
    arguments = parse_arguments(args.arguments)
    expid = {
        "file": file,
        "experiment": args.experiment,
        "arguments": arguments
    }
    return exp(dbh,
               scheduler=DummyScheduler(expid),
               **arguments)


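# Top-level driver: parse arguments, open the flat-file databases, build and
# run the experiment, then write results to HDF5 or print them.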
def run(with_file=False):
    args = get_argparser(with_file).parse_args()
    init_logger(args)

    ddb = FlatFileDB(args.ddb)
    pdb = FlatFileDB(args.pdb)
    pdb.hooks.append(SimpleParamLogger())
    rdb = ResultDB(lambda description: None, lambda mod: None)
    dbh = DBHub(ddb, pdb, rdb)

    try:
        exp_inst = _build_experiment(dbh, args)
        rdb.build()
        exp_inst.run()
        exp_inst.analyze()
    finally:
        dbh.close_devices()

    if args.hdf5 is not None:
        with h5py.File(args.hdf5, "w") as f:
            rdb.write_hdf5(f)
    elif rdb.data.read or rdb.realtime_data.read:
        r = chain(rdb.realtime_data.read.items(), rdb.data.read.items())
        for k, v in sorted(r, key=itemgetter(0)):
            print("{}: {}".format(k, v))


def main():
    return run(with_file=True)


if __name__ == "__main__":
    main()