forked from M-Labs/artiq
master/language: add methods to set experiment pipeline/priority/flush defaults
parent b2177eff81
commit 8659c769cb

@@ -207,10 +207,12 @@ class HasEnvironment:
             self.__device_mgr = managers_or_parent[0]
             self.__dataset_mgr = managers_or_parent[1]
             self.__argument_mgr = managers_or_parent[2]
+            self.__scheduler_defaults = managers_or_parent[3]
         else:
             self.__device_mgr = managers_or_parent.__device_mgr
             self.__dataset_mgr = managers_or_parent.__dataset_mgr
             self.__argument_mgr = managers_or_parent.__argument_mgr
+            self.__scheduler_defaults = {}
             managers_or_parent.register_child(self)
 
         self.__in_build = True
@@ -364,6 +366,21 @@ class HasEnvironment:
         dataset and of the attribute are the same."""
         setattr(self, key, self.get_dataset(key, default, archive))
 
+    def set_default_scheduling(self, priority=None, pipeline_name=None, flush=None):
+        """Sets the default scheduling options.
+
+        This function should only be called from ``build``."""
+        if not self.__in_build:
+            raise TypeError("set_default_scheduling() should only "
+                            "be called from build()")
+
+        if priority is not None:
+            self.__scheduler_defaults["priority"] = int(priority)
+        if pipeline_name is not None:
+            self.__scheduler_defaults["pipeline_name"] = pipeline_name
+        if flush is not None:
+            self.__scheduler_defaults["flush"] = flush
+
 
 class Experiment:
     """Base class for top-level experiments.
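
For illustration, a minimal sketch of how an experiment might use the new
method from build() (the class name and values here are hypothetical, and the
usual artiq.experiment imports are assumed):

    from artiq.experiment import EnvExperiment

    class CalibrationScan(EnvExperiment):
        def build(self):
            # Hypothetical defaults: submit to the "calibration" pipeline
            # at priority 10 and flush the pipeline before running.
            # set_default_scheduling() raises TypeError if called outside
            # build().
            self.set_default_scheduling(priority=10,
                                        pipeline_name="calibration",
                                        flush=True)

        def run(self):
            pass
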
@@ -46,7 +46,8 @@ class _RepoScanner:
             entry = {
                 "file": filename,
                 "class_name": class_name,
-                "arginfo": arginfo
+                "arginfo": arginfo,
+                "scheduler_defaults": class_desc["scheduler_defaults"]
             }
             entry_dict[name] = entry
 
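Assuming the hypothetical class above, the repository scanner entry produced
for it might then look roughly like this (values illustrative only):

    entry = {
        "file": "calibration_scan.py",
        "class_name": "CalibrationScan",
        "arginfo": OrderedDict(),
        "scheduler_defaults": {"priority": 10,
                               "pipeline_name": "calibration",
                               "flush": True}
    }
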
@@ -115,7 +116,6 @@ class ExperimentDB:
             t1 = time.monotonic()
             new_explist = await _RepoScanner(self.worker_handlers).scan(wd)
             logger.info("repository scan took %d seconds", time.monotonic()-t1)
-
             update_from_dict(self.explist, new_explist)
         finally:
             self._scanning = False
@@ -303,8 +303,8 @@ class Worker:
         await self._create_process(logging.WARNING)
         r = dict()
 
-        def register(class_name, name, arginfo):
-            r[class_name] = {"name": name, "arginfo": arginfo}
+        def register(class_name, name, arginfo, scheduler_defaults):
+            r[class_name] = {"name": name, "arginfo": arginfo, "scheduler_defaults": scheduler_defaults}
         self.register_experiment = register
         await self._worker_action({"action": "examine", "file": file},
                                   timeout)
@@ -174,11 +174,12 @@ def examine(device_mgr, dataset_mgr, file):
             if name[-1] == ".":
                 name = name[:-1]
             argument_mgr = TraceArgumentManager()
-            exp_class((device_mgr, dataset_mgr, argument_mgr))
+            scheduler_defaults = {}
+            cls = exp_class((device_mgr, dataset_mgr, argument_mgr, scheduler_defaults))
             arginfo = OrderedDict(
                 (k, (proc.describe(), group, tooltip))
                 for k, (proc, group, tooltip) in argument_mgr.requested_args.items())
-            register_experiment(class_name, name, arginfo)
+            register_experiment(class_name, name, arginfo, scheduler_defaults)
     finally:
         new_keys = set(sys.modules.keys())
         for key in new_keys - previous_keys:
@@ -277,7 +278,7 @@ def main():
             os.makedirs(dirname, exist_ok=True)
             os.chdir(dirname)
             argument_mgr = ProcessArgumentManager(expid["arguments"])
-            exp_inst = exp((device_mgr, dataset_mgr, argument_mgr))
+            exp_inst = exp((device_mgr, dataset_mgr, argument_mgr, {}))
             put_object({"action": "completed"})
         elif action == "prepare":
             exp_inst.prepare()