mirror of https://github.com/m-labs/artiq.git
artiq_run: refactor, support use from within experiments
You can always (under POSIX) use

    #!/usr/bin/env artiq_run

as shebang for experiments and make them executable. Now, you can also do
this (portable):

    if __name__ == "__main__":
        from artiq.frontend.artiq_run import run
        run()

to make an experiment executable. The CLI options are all inherited.

Also:

* removed --elf: it can be inferred from the filename
* did some refactoring and cleanup
* use logging for all messages, except the result printing
  (use -v to get parameter changes and dummy scheduler actions)
This commit is contained in:
  parent 43893c6c1d
  commit 1a1afd5410
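A minimal sketch of the pattern described in the message (the module below is hypothetical and its class body is illustrative only; just the shebang line and the __main__ block are what the commit prescribes):

    #!/usr/bin/env python3
    # Hypothetical experiment module; the class name and body are made up.
    from artiq.language.db import *
    from artiq.language.experiment import Experiment


    class MyExperiment(Experiment, AutoDB):
        """dummy experiment"""

        class DBKeys:
            pass  # devices/arguments would be declared here

        def run(self):
            print("running")


    if __name__ == "__main__":
        # Makes this file directly executable; all artiq_run CLI options
        # (-e, -o, -v, name=value run arguments, ...) are inherited.
        from artiq.frontend.artiq_run import run
        run()

With the shebang variant, #!/usr/bin/env artiq_run plus chmod +x is enough on POSIX systems; the __main__ block also works on platforms without shebang support.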
@@ -5,32 +5,36 @@ import sys
 import time
 from operator import itemgetter
 from itertools import chain
+import logging
 
 import h5py
 
 from artiq.language.db import *
-from artiq.language.experiment import is_experiment
+from artiq.language.experiment import is_experiment, Experiment
 from artiq.protocols import pyon
 from artiq.protocols.file_db import FlatFileDB
 from artiq.master.worker_db import DBHub, ResultDB
 from artiq.tools import file_import, verbosity_args, init_logger
 
 
-class ELFRunner(AutoDB):
+logger = logging.getLogger(__name__)
+
+
+class ELFRunner(Experiment, AutoDB):
     class DBKeys:
         comm = Device()
+        file = Argument()
 
-    def run(self, filename):
-        with open(filename, "rb") as f:
-            binary = f.read()
-        comm.load(binary)
-        comm.run("run")
-        comm.serve(dict(), dict())
+    def run(self):
+        with open(self.file, "rb") as f:
+            self.comm.load(f.read())
+        self.comm.run("run")
+        self.comm.serve(dict(), dict())
 
 
 class SimpleParamLogger:
     def set(self, timestamp, name, value):
-        print("Parameter change: {} -> {}".format(name, value))
+        logger.info("Parameter change: {} = {}".format(name, value))
 
 
 class DummyWatchdog:
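With this hunk ELFRunner becomes a regular Experiment whose input file arrives through the database keys (file = Argument()) instead of being passed to run(). A rough sketch of how the new class is driven, with placeholder names (dbh would be the DBHub set up in run() further down; the .elf path is made up):

    # Sketch only, assuming a DBHub `dbh` wired to device/parameter/result DBs.
    elf_exp = ELFRunner(dbh, file="kernel.elf")  # `file` now comes through AutoDB
    elf_exp.run()  # loads the binary, invokes its "run" entry point, serves RPCs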
@@ -52,26 +56,26 @@ class DummyScheduler:
     def run_queued(self, run_params):
         rid = self.next_rid
         self.next_rid += 1
-        print("Queuing: {}, RID={}".format(run_params, rid))
+        logger.info("Queuing: %s, RID=%s", run_params, rid)
         return rid
 
     def cancel_queued(self, rid):
-        print("Cancelling RID {}".format(rid))
+        logger.info("Cancelling RID %s", rid)
 
     def run_timed(self, run_params, next_run):
         trid = self.next_trid
         self.next_trid += 1
         next_run_s = time.strftime("%m/%d %H:%M:%S", time.localtime(next_run))
-        print("Timing: {} at {}, TRID={}".format(run_params, next_run_s, trid))
+        logger.info("Timing: %s at %s, TRID=%s", run_params, next_run_s, trid)
         return trid
 
     def cancel_timed(self, trid):
-        print("Cancelling TRID {}".format(trid))
+        logger.info("Cancelling TRID %s", trid)
 
     watchdog = DummyWatchdog
 
 
-def get_argparser():
+def get_argparser(with_file):
     parser = argparse.ArgumentParser(
         description="Local experiment running tool")
 
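The scheduler stubs switch from print() to the module-level logger, so these messages only appear when verbosity is raised (the -v flag mentioned in the commit message). A standalone illustration of the deferred %-style formatting used here (all names and values are made up):

    import logging

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger("demo")

    run_params = {"file": "exp.py", "experiment": None}
    rid = 0
    # Arguments are only interpolated if the record is actually emitted,
    # which is why the new code passes them separately instead of
    # pre-formatting with str.format().
    logger.info("Queuing: %s, RID=%s", run_params, rid)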
@@ -81,15 +85,14 @@ def get_argparser():
     parser.add_argument("-p", "--pdb", default="pdb.pyon",
                         help="parameter database file")
 
-    parser.add_argument("-E", "--elf", default=False, action="store_true",
-                        help="run ELF binary")
     parser.add_argument("-e", "--experiment", default=None,
                         help="experiment to run")
     parser.add_argument("-o", "--hdf5", default=None,
                         help="write results to specified HDF5 file"
                              " (default: print them)")
-    parser.add_argument("file",
-                        help="file containing the experiment to run")
+    if with_file:
+        parser.add_argument("file",
+                            help="file containing the experiment to run")
     parser.add_argument("arguments", nargs="*",
                         help="run arguments")
 
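-E/--elf is gone (ELF mode is now inferred from the .elf extension, see the next hunk), and the file positional is only added when the parser is built for the artiq_run entry point. A quick sanity sketch of the two parser variants, assuming this module is importable as artiq.frontend.artiq_run (file names and argument values are made up):

    from artiq.frontend.artiq_run import get_argparser

    # artiq_run itself uses run(with_file=True): `file` is a positional argument.
    args = get_argparser(True).parse_args(
        ["-e", "MyExperiment", "experiment.py", "scan_points=100"])
    assert args.file == "experiment.py"
    assert args.arguments == ["scan_points=100"]

    # run() called from inside an experiment module: no `file` positional.
    args = get_argparser(False).parse_args(["scan_points=100"])
    assert not hasattr(args, "file")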
@@ -99,86 +102,77 @@ def get_argparser():
 def _parse_arguments(arguments):
     d = {}
     for argument in arguments:
-        name, value = argument.split("=")
+        name, eq, value = argument.partition("=")
         d[name] = pyon.decode(value)
     return d
 
 
-def main():
-    args = get_argparser().parse_args()
+def _get_experiment(module, experiment=None):
+    if experiment:
+        return getattr(module, experiment)
+
+    exps = [(k, v) for k, v in module.__dict__.items()
+            if is_experiment(v)]
+    if not exps:
+        logger.error("No experiments in module")
+    if len(exps) > 1:
+        logger.warning("Multiple experiments (%s), using first",
+                       ", ".join(k for (k, v) in exps))
+    return exps[0][1]
+
+
+def _build_experiment(dbh, args):
+    if hasattr(args, "file"):
+        if args.file.endswith(".elf"):
+            if args.arguments:
+                raise ValueError("arguments not supported for ELF kernels")
+            if args.experiment:
+                raise ValueError("experiment-by-name not supported "
+                                 "for ELF kernels")
+            return ELFRunner(dbh, file=args.file)
+        else:
+            module = file_import(args.file)
+        file = args.file
+    else:
+        module = sys.modules["__main__"]
+        file = getattr(module, "__file__")
+    exp = _get_experiment(module, args.experiment)
+    arguments = _parse_arguments(args.arguments)
+    return exp(dbh,
+               scheduler=DummyScheduler(),
+               run_params=dict(file=file,
+                               experiment=args.experiment,
+                               arguments=arguments),
+               **arguments)
+
+
+def run(with_file=False):
+    args = get_argparser(with_file).parse_args()
     init_logger(args)
 
     ddb = FlatFileDB(args.ddb)
     pdb = FlatFileDB(args.pdb)
     pdb.hooks.append(SimpleParamLogger())
     rdb = ResultDB(lambda description: None, lambda mod: None)
-    dbh = DBHub(ddb, pdb, rdb)
-    try:
-        if args.elf:
-            if args.arguments:
-                print("Run arguments are not supported in ELF mode")
-                sys.exit(1)
-            exp_inst = ELFRunner(dbh)
-            rdb.build()
-            exp_inst.run(args.file)
-        else:
-            module = file_import(args.file)
-            if args.experiment is None:
-                exps = [(k, v) for k, v in module.__dict__.items()
-                        if is_experiment(v)]
-                l = len(exps)
-                if l == 0:
-                    print("No experiments found in module")
-                    sys.exit(1)
-                elif l > 1:
-                    print("More than one experiment found in module:")
-                    for k, v in sorted(experiments, key=itemgetter(0)):
-                        if v.__doc__ is None:
-                            print("    {}".format(k))
-                        else:
-                            print("    {} ({})".format(
-                                k, v.__doc__.splitlines()[0].strip()))
-                    print("Use -u to specify which experiment to use.")
-                    sys.exit(1)
-                else:
-                    exp = exps[0][1]
-            else:
-                exp = getattr(module, args.experiment)
-
-            try:
-                arguments = _parse_arguments(args.arguments)
-            except:
-                print("Failed to parse run arguments")
-                sys.exit(1)
-
-            run_params = {
-                "file": args.file,
-                "experiment": args.experiment,
-                "arguments": arguments
-            }
-            exp_inst = exp(dbh,
-                           scheduler=DummyScheduler(),
-                           run_params=run_params,
-                           **run_params["arguments"])
-            rdb.build()
-            exp_inst.run()
-            exp_inst.analyze()
+    with DBHub(ddb, pdb, rdb) as dbh:
+        exp_inst = _build_experiment(dbh, args)
+        rdb.build()
+        exp_inst.run()
+        exp_inst.analyze()
 
-        if args.hdf5 is not None:
-            f = h5py.File(args.hdf5, "w")
-            try:
-                rdb.write_hdf5(f)
-            finally:
-                f.close()
-        else:
-            if rdb.data.read or rdb.realtime_data.read:
-                print("Results:")
-                for k, v in sorted(chain(rdb.realtime_data.read.items(),
-                                         rdb.data.read.items()),
-                                   key=itemgetter(0)):
-                    print("{}: {}".format(k, v))
-    finally:
-        dbh.close_devices()
+        if args.hdf5 is not None:
+            with h5py.File(args.hdf5, "w") as f:
+                rdb.write_hdf5(f)
+        elif rdb.data.read or rdb.realtime_data.read:
+            r = chain(rdb.realtime_data.read.items(), rdb.data.read.items())
+            for k, v in sorted(r, key=itemgetter(0)):
+                print("{}: {}".format(k, v))
+
+
+def main():
+    return run(with_file=True)
+
 
 if __name__ == "__main__":
     main()
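The old monolithic main() is split into _get_experiment(), _build_experiment() and a reusable run(); main() is now just run(with_file=True), and the explicit try/finally around dbh.close_devices() gives way to DBHub and h5py.File context managers. One detail worth a standalone illustration is the switch from str.split to str.partition in _parse_arguments: pyon-encoded values may themselves contain '=' (the example value below is made up):

    argument = "comment='gain = 3 dB'"

    # New code: everything after the first '=' stays in `value`.
    name, _, value = argument.partition("=")
    print(name)   # comment
    print(value)  # 'gain = 3 dB'  (pyon.decode(value) then yields the string)

    # Old code: split("=") returns three pieces here, so the 2-tuple
    # unpacking `name, value = argument.split("=")` raises ValueError.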