forked from M-Labs/artiq
Merge branch 'master' into nk/phaser-servo-clean
* master: (25 commits)
flake: update rpi-1 host key
aqctl_moninj_proxy: clear listeners on disconnect
Add method to check if termination is requested (#811, #1932)
moninj: fix underflows by order of operation fix channel toggle
moninj: fix underflows for urukul freq set
Urukul monitoring (#1142, #1921)
moninj: make receive_task private again
moninj,corelog: fix/cleanup exception handling (#1897)
aqctl_corelog: enable keepalive, terminate on connection failure
Modify log for matching the style
Add log message when dashboard connected to proxy
Public receive_task for the use in proxy
applets.simple: Actually forward dataset_prefixes when using IPC
master: Fixup 32db6ff978 (argument_ui support)
Revert "add pull.yml (#1918)"
add pull.yml (#1918)
Allow experiments to specify a custom argument editor UI (#1916)
dashboard: Add submit/close hooks for custom argument editors
dashboard: Plumb through datasets client to ExperimentManager
dashboard: Add cmdline option to load plugins on startup
...
This commit is contained in: commit d2dacc6433
@@ -45,6 +45,7 @@ Highlights:
   switch is supported.
 * The "ip" config option can now be set to "use_dhcp" in order to use DHCP to obtain an IP address.
   DHCP will also be used if no "ip" config option is set.
+* Urukul monitoring and frequency setting (through dashboard) is now supported.
 
 Breaking changes:
 
@@ -64,9 +64,10 @@ class AppletIPCClient(AsyncioChildComm):
                          exc_info=True)
             self.close_cb()
 
-    def subscribe(self, datasets, init_cb, mod_cb):
+    def subscribe(self, datasets, init_cb, mod_cb, dataset_prefixes=[]):
         self.write_pyon({"action": "subscribe",
-                         "datasets": datasets})
+                         "datasets": datasets,
+                         "dataset_prefixes": dataset_prefixes})
         self.init_cb = init_cb
         self.mod_cb = mod_cb
         asyncio.ensure_future(self.listen())
@@ -113,6 +114,9 @@ class SimpleApplet:
         self.embed = os.getenv("ARTIQ_APPLET_EMBED")
         self.datasets = {getattr(self.args, arg.replace("-", "_"))
                          for arg in self.dataset_args}
+        # Optional prefixes (dataset sub-trees) to match subscriptions against;
+        # currently only used by out-of-tree subclasses (ndscan).
+        self.dataset_prefixes = []
 
     def qasync_init(self):
         app = QtWidgets.QApplication([])
@@ -162,6 +166,14 @@ class SimpleApplet:
             self.data = data
         return data
 
+    def is_dataset_subscribed(self, key):
+        if key in self.datasets:
+            return True
+        for prefix in self.dataset_prefixes:
+            if key.startswith(prefix):
+                return True
+        return False
+
     def filter_mod(self, mod):
         if self.embed is not None:
             # the parent already filters for us
@@ -170,9 +182,9 @@ class SimpleApplet:
         if mod["action"] == "init":
             return True
         if mod["path"]:
-            return mod["path"][0] in self.datasets
+            return self.is_dataset_subscribed(mod["path"][0])
         elif mod["action"] in {"setitem", "delitem"}:
-            return mod["key"] in self.datasets
+            return self.is_dataset_subscribed(mod["key"])
         else:
             return False
 
@@ -204,7 +216,8 @@ class SimpleApplet:
             self.loop.run_until_complete(self.subscriber.connect(
                 self.args.server, self.args.port))
         else:
-            self.ipc.subscribe(self.datasets, self.sub_init, self.sub_mod)
+            self.ipc.subscribe(self.datasets, self.sub_init, self.sub_mod,
+                               dataset_prefixes=self.dataset_prefixes)
 
     def unsubscribe(self):
         if self.embed is None:
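The dataset_prefixes hook above lets an applet follow an entire dataset sub-tree instead of a fixed list of keys. A minimal sketch of how an out-of-tree SimpleApplet subclass might opt in, assuming (as the hunk above suggests) that args_init() is where datasets/dataset_prefixes get set up; the class name and prefix are illustrative only:

from artiq.applets.simple import SimpleApplet

class ScanTreeApplet(SimpleApplet):
    # Hypothetical subclass: besides the datasets named on the command line,
    # also receive every dataset whose key starts with "scan.".
    def args_init(self):
        super().args_init()
        self.dataset_prefixes.append("scan.")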
@@ -46,6 +46,9 @@ class CommMonInj:
             del self._writer
             raise
 
+    def wait_terminate(self):
+        return self._receive_task
+
     async def close(self):
         self.disconnect_cb = None
         try:
@@ -91,6 +94,10 @@ class CommMonInj:
                     self.injection_status_cb(channel, override, value)
                 else:
                     raise ValueError("Unknown packet type", ty)
+        except asyncio.CancelledError:
+            raise
+        except:
+            logger.error("Moninj connection terminating with exception", exc_info=True)
         finally:
             if self.disconnect_cb is not None:
                 self.disconnect_cb()
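With the receive task private again, callers are expected to watch the connection through the new wait_terminate() accessor and the disconnect callback rather than touching _receive_task directly. A rough usage sketch (the address is made up; the callback signatures and the three-argument constructor follow the diffs in this commit):

import asyncio
from artiq.coredevice.comm_moninj import CommMonInj

async def watch_moninj():
    def monitor_cb(channel, probe, value):
        print("monitor:", channel, probe, value)

    def injection_status_cb(channel, override, value):
        print("injection status:", channel, override, value)

    def disconnect_cb():
        print("moninj connection lost")

    comm = CommMonInj(monitor_cb, injection_status_cb, disconnect_cb)
    await comm.connect("192.168.1.70")  # hypothetical core device address
    try:
        # Resolves when the (private) receive task terminates, e.g. on disconnect.
        await comm.wait_terminate()
    finally:
        await comm.close()

asyncio.run(watch_moninj())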
@@ -164,7 +164,7 @@ class _ArgumentEditor(QtWidgets.QTreeWidget):
 
     async def _recompute_argument(self, name):
         try:
-            expdesc = await self.manager.compute_expdesc(self.expurl)
+            expdesc, _ = await self.manager.compute_expdesc(self.expurl)
         except:
             logger.error("Could not recompute argument '%s' of '%s'",
                          name, self.expurl, exc_info=True)
@@ -216,6 +216,15 @@ class _ArgumentEditor(QtWidgets.QTreeWidget):
                 pass
         self.verticalScrollBar().setValue(state["scroll"])
 
+    # Hooks that allow user-supplied argument editors to react to imminent user
+    # actions. Here, we always keep the manager-stored submission arguments
+    # up-to-date, so no further action is required.
+    def about_to_submit(self):
+        pass
+
+    def about_to_close(self):
+        pass
+
 
 log_levels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]
 
@@ -241,7 +250,8 @@ class _ExperimentDock(QtWidgets.QMdiSubWindow):
         self.manager = manager
         self.expurl = expurl
 
-        self.argeditor = _ArgumentEditor(self.manager, self, self.expurl)
+        editor_class = self.manager.get_argument_editor_class(expurl)
+        self.argeditor = editor_class(self.manager, self, self.expurl)
         self.layout.addWidget(self.argeditor, 0, 0, 1, 5)
         self.layout.setRowStretch(0, 1)
 
@@ -369,6 +379,7 @@ class _ExperimentDock(QtWidgets.QMdiSubWindow):
         self.hdf5_load_directory = os.path.expanduser("~")
 
     def submit_clicked(self):
+        self.argeditor.about_to_submit()
         try:
             self.manager.submit(self.expurl)
         except:
@@ -391,7 +402,7 @@ class _ExperimentDock(QtWidgets.QMdiSubWindow):
 
     async def _recompute_arguments_task(self, overrides=dict()):
         try:
-            expdesc = await self.manager.compute_expdesc(self.expurl)
+            expdesc, ui_name = await self.manager.compute_expdesc(self.expurl)
         except:
             logger.error("Could not recompute experiment description of '%s'",
                          self.expurl, exc_info=True)
@@ -404,12 +415,13 @@ class _ExperimentDock(QtWidgets.QMdiSubWindow):
                     arginfo[k][0]["default"].insert(0, v)
                 else:
                     arginfo[k][0]["default"] = v
-        self.manager.initialize_submission_arguments(self.expurl, arginfo)
+        self.manager.initialize_submission_arguments(self.expurl, arginfo, ui_name)
 
         argeditor_state = self.argeditor.save_state()
         self.argeditor.deleteLater()
 
-        self.argeditor = _ArgumentEditor(self.manager, self, self.expurl)
+        editor_class = self.manager.get_argument_editor_class(self.expurl)
+        self.argeditor = editor_class(self.manager, self, self.expurl)
         self.argeditor.restore_state(argeditor_state)
         self.layout.addWidget(self.argeditor, 0, 0, 1, 5)
 
@@ -422,7 +434,7 @@ class _ExperimentDock(QtWidgets.QMdiSubWindow):
 
     async def _recompute_sched_options_task(self):
         try:
-            expdesc = await self.manager.compute_expdesc(self.expurl)
+            expdesc, _ = await self.manager.compute_expdesc(self.expurl)
         except:
             logger.error("Could not recompute experiment description of '%s'",
                          self.expurl, exc_info=True)
@@ -473,6 +485,7 @@ class _ExperimentDock(QtWidgets.QMdiSubWindow):
         await self._recompute_arguments_task(arguments)
 
     def closeEvent(self, event):
+        self.argeditor.about_to_close()
         self.sigClosed.emit()
         QtWidgets.QMdiSubWindow.closeEvent(self, event)
 
@@ -544,7 +557,13 @@ class _QuickOpenDialog(QtWidgets.QDialog):
 
 
 class ExperimentManager:
-    def __init__(self, main_window,
+    #: Global registry for custom argument editor classes, indexed by the experiment
+    #: `argument_ui` string; can be populated by dashboard plugins such as ndscan.
+    #: If no handler for a requested UI name is found, the default built-in argument
+    #: editor will be used.
+    argument_ui_classes = dict()
+
+    def __init__(self, main_window, dataset_sub,
                  explist_sub, schedule_sub,
                  schedule_ctl, experiment_db_ctl):
         self.main_window = main_window
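The new argument_ui_classes registry is the hook that a dashboard plugin (loaded with the --load-plugin option added further down in this commit) is expected to fill in. A minimal sketch of such a plugin module, assuming ExperimentManager is importable from artiq.dashboard.experiments; the "example_ui" name and the placeholder widget are illustrative, not part of this commit:

# my_dashboard_plugin.py -- hypothetical; load with: artiq_dashboard -p my_dashboard_plugin
from PyQt5 import QtWidgets
from artiq.dashboard.experiments import ExperimentManager

class ExampleArgumentEditor(QtWidgets.QTreeWidget):
    # Placeholder editor. A real implementation would render and edit the
    # submission arguments kept in manager.get_submission_arguments(expurl).
    def __init__(self, manager, dock, expurl):
        QtWidgets.QTreeWidget.__init__(self)
        self.manager = manager
        self.expurl = expurl

    # Interface used by _ExperimentDock (see save_state/about_to_* above).
    def save_state(self):
        return {}

    def restore_state(self, state):
        pass

    def about_to_submit(self):
        pass

    def about_to_close(self):
        pass

# Experiments whose description carries argument_ui == "example_ui" now get
# this editor; unknown names fall back to the built-in _ArgumentEditor.
ExperimentManager.argument_ui_classes["example_ui"] = ExampleArgumentEditor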
@@ -555,7 +574,10 @@ class ExperimentManager:
         self.submission_scheduling = dict()
         self.submission_options = dict()
         self.submission_arguments = dict()
+        self.argument_ui_names = dict()
 
+        self.datasets = dict()
+        dataset_sub.add_setmodel_callback(self.set_dataset_model)
         self.explist = dict()
         explist_sub.add_setmodel_callback(self.set_explist_model)
         self.schedule = dict()
@@ -570,6 +592,9 @@ class ExperimentManager:
         quick_open_shortcut.setContext(QtCore.Qt.ApplicationShortcut)
         quick_open_shortcut.activated.connect(self.show_quick_open)
 
+    def set_dataset_model(self, model):
+        self.datasets = model
+
     def set_explist_model(self, model):
         self.explist = model.backing_store
 
@@ -586,6 +611,17 @@ class ExperimentManager:
         else:
             raise ValueError("Malformed experiment URL")
 
+    def get_argument_editor_class(self, expurl):
+        ui_name = self.argument_ui_names.get(expurl, None)
+        if not ui_name and expurl[:5] == "repo:":
+            ui_name = self.explist.get(expurl[5:], {}).get("argument_ui", None)
+        if ui_name:
+            result = self.argument_ui_classes.get(ui_name, None)
+            if result:
+                return result
+            logger.warning("Ignoring unknown argument UI '%s'", ui_name)
+        return _ArgumentEditor
+
     def get_submission_scheduling(self, expurl):
         if expurl in self.submission_scheduling:
             return self.submission_scheduling[expurl]
@@ -615,7 +651,7 @@ class ExperimentManager:
         self.submission_options[expurl] = options
         return options
 
-    def initialize_submission_arguments(self, expurl, arginfo):
+    def initialize_submission_arguments(self, expurl, arginfo, ui_name):
         arguments = OrderedDict()
         for name, (procdesc, group, tooltip) in arginfo.items():
             state = procdesc_to_entry(procdesc).default_state(procdesc)
@@ -626,6 +662,7 @@ class ExperimentManager:
                 "state": state,  # mutated by entries
             }
         self.submission_arguments[expurl] = arguments
+        self.argument_ui_names[expurl] = ui_name
        return arguments
 
     def get_submission_arguments(self, expurl):
@@ -635,9 +672,9 @@ class ExperimentManager:
         if expurl[:5] != "repo:":
             raise ValueError("Submission arguments must be preinitialized "
                              "when not using repository")
-        arginfo = self.explist[expurl[5:]]["arginfo"]
-        arguments = self.initialize_submission_arguments(expurl, arginfo)
-        return arguments
+        class_desc = self.explist[expurl[5:]]
+        return self.initialize_submission_arguments(expurl,
            class_desc["arginfo"], class_desc.get("argument_ui", None))
 
     def open_experiment(self, expurl):
         if expurl in self.open_experiments:
@@ -739,13 +776,15 @@ class ExperimentManager:
             revision = None
         description = await self.experiment_db_ctl.examine(
             file, use_repository, revision)
-        return description[class_name]
+        class_desc = description[class_name]
+        return class_desc, class_desc.get("argument_ui", None)
 
     async def open_file(self, file):
         description = await self.experiment_db_ctl.examine(file, False)
         for class_name, class_desc in description.items():
             expurl = "file:{}@{}".format(class_name, file)
-            self.initialize_submission_arguments(expurl, class_desc["arginfo"])
+            self.initialize_submission_arguments(expurl, class_desc["arginfo"],
                class_desc.get("argument_ui", None))
             if expurl in self.open_experiments:
                 self.open_experiments[expurl].close()
             self.open_experiment(expurl)
@@ -758,6 +797,7 @@ class ExperimentManager:
             "options": self.submission_options,
             "arguments": self.submission_arguments,
             "docks": self.dock_states,
+            "argument_uis": self.argument_ui_names,
             "open_docks": set(self.open_experiments.keys())
         }
 
@@ -768,6 +808,7 @@ class ExperimentManager:
         self.submission_scheduling = state["scheduling"]
         self.submission_options = state["options"]
         self.submission_arguments = state["arguments"]
+        self.argument_ui_names = state.get("argument_uis", {})
         for expurl in state["open_docks"]:
             self.open_experiment(expurl)
 
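On the experiment side, the UI name rides along with the examined class description: compute_expdesc and the repository scan both now hand back an argument_ui value, which get_argument_editor_class resolves against the registry. Assuming that an argument_ui class attribute on the experiment is what ends up in that description (this is how ndscan-style experiments advertise their UI; the worker side is not shown in this diff), declaring it would look like:

from artiq.experiment import EnvExperiment

class ExampleExperiment(EnvExperiment):
    # Hypothetical experiment: request the editor registered under
    # "example_ui"; the dashboard falls back to the default editor (with a
    # warning) if no plugin has registered that name.
    argument_ui = "example_ui"

    def build(self):
        self.setattr_device("core")

    def run(self):
        pass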
@@ -8,6 +8,8 @@ from PyQt5 import QtCore, QtWidgets, QtGui
 from sipyco.sync_struct import Subscriber
 
 from artiq.coredevice.comm_moninj import *
+from artiq.coredevice.ad9910 import _AD9910_REG_PROFILE0, _AD9910_REG_PROFILE7, _AD9910_REG_FTW
+from artiq.coredevice.ad9912_reg import AD9912_POW1
 from artiq.gui.tools import LayoutWidget
 from artiq.gui.flowlayout import FlowLayout
 
@@ -179,14 +181,45 @@ class _SimpleDisplayWidget(QtWidgets.QFrame):
         raise NotImplementedError
 
 
+class _DDSModel:
+    def __init__(self, dds_type, ref_clk, cpld=None, pll=1, clk_div=0):
+        self.cpld = cpld
+        self.cur_frequency = 0
+        self.cur_reg = 0
+        self.dds_type = dds_type
+        self.is_urukul = dds_type in ["AD9910", "AD9912"]
+
+        if dds_type == "AD9914":
+            self.ftw_per_hz = 2**32 / ref_clk
+        else:
+            if dds_type == "AD9910":
+                max_freq = 1 << 32
+                clk_mult = [4, 1, 2, 4]
+            elif dds_type == "AD9912":  # AD9912
+                max_freq = 1 << 48
+                clk_mult = [1, 1, 2, 4]
+            else:
+                raise NotImplementedError
+            sysclk = ref_clk / clk_mult[clk_div] * pll
+            self.ftw_per_hz = 1 / sysclk * max_freq
+
+    def monitor_update(self, probe, value):
+        if self.dds_type == "AD9912":
+            value = value << 16
+        self.cur_frequency = self._ftw_to_freq(value)
+
+    def _ftw_to_freq(self, ftw):
+        return ftw / self.ftw_per_hz
+
+
 class _DDSWidget(QtWidgets.QFrame):
-    def __init__(self, dm, title, bus_channel=0, channel=0, cpld=None):
+    def __init__(self, dm, title, bus_channel=0, channel=0, dds_model=None):
         self.dm = dm
         self.bus_channel = bus_channel
         self.channel = channel
         self.dds_name = title
-        self.cpld = cpld
         self.cur_frequency = 0
+        self.dds_model = dds_model
 
         QtWidgets.QFrame.__init__(self)
 
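_DDSModel turns the raw frequency tuning words (FTWs) reported by the monitor probes back into Hz. A quick sanity check of the arithmetic with illustrative numbers (a typical Urukul AD9910 with a 125 MHz reference, pll = 32, clk_div = 0, i.e. a 1 GHz sysclk); only the constructor signature comes from the diff above, the values do not:

# sysclk = 125 MHz / clk_mult[0] * 32 = 1 GHz, so ftw_per_hz = 2**32 / 1e9
model = _DDSModel("AD9910", ref_clk=125e6, cpld="urukul0_cpld", pll=32, clk_div=0)

ftw = round(100e6 * model.ftw_per_hz)   # FTW corresponding to a 100 MHz output
model.monitor_update(0, ftw)            # what the dashboard does on a monitor packet
assert abs(model.cur_frequency - 100e6) < 1

# For the AD9912 the probe only carries the upper 32 bits of the 48-bit FTW,
# which is why monitor_update() shifts the value left by 16 first.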
@@ -249,7 +282,7 @@ class _DDSWidget(QtWidgets.QFrame):
         set_grid.addWidget(set_btn, 0, 1, 1, 1)
 
         # for urukuls also allow switching off RF
-        if self.cpld:
+        if self.dds_model.is_urukul:
             off_btn = QtWidgets.QToolButton()
             off_btn.setText("Off")
             off_btn.setToolTip("Switch off the output")
@@ -276,7 +309,7 @@ class _DDSWidget(QtWidgets.QFrame):
 
         set_btn.clicked.connect(self.set_clicked)
         apply.clicked.connect(self.apply_changes)
-        if self.cpld:
+        if self.dds_model.is_urukul:
             off_btn.clicked.connect(self.off_clicked)
         self.value_edit.returnPressed.connect(lambda: self.apply_changes(None))
         self.value_edit.escapePressedConnect(self.cancel_changes)
@@ -293,19 +326,20 @@ class _DDSWidget(QtWidgets.QFrame):
         self.value_edit.selectAll()
 
     def off_clicked(self, set):
-        self.dm.dds_channel_toggle(self.dds_name, self.cpld, sw=False)
+        self.dm.dds_channel_toggle(self.dds_name, self.dds_model, sw=False)
 
     def apply_changes(self, apply):
         self.data_stack.setCurrentIndex(0)
         self.button_stack.setCurrentIndex(0)
         frequency = float(self.value_edit.text())*1e6
-        self.dm.dds_set_frequency(self.dds_name, self.cpld, frequency)
+        self.dm.dds_set_frequency(self.dds_name, self.dds_model, frequency)
 
     def cancel_changes(self, cancel):
         self.data_stack.setCurrentIndex(0)
         self.button_stack.setCurrentIndex(0)
 
     def refresh_display(self):
+        self.cur_frequency = self.dds_model.cur_frequency
         self.value_label.setText("<font size=\"4\">{:.7f}</font>"
                                  .format(self.cur_frequency/1e6))
         self.value_edit.setText("{:.7f}"
@@ -356,7 +390,8 @@ def setup_from_ddb(ddb):
                     bus_channel = v["arguments"]["bus_channel"]
                     channel = v["arguments"]["channel"]
                     dds_sysclk = v["arguments"]["sysclk"]
-                    widget = _WidgetDesc(k, comment, _DDSWidget, (k, bus_channel, channel))
+                    model = _DDSModel(v["class"], dds_sysclk)
+                    widget = _WidgetDesc(k, comment, _DDSWidget, (k, bus_channel, channel, model))
                     description.add(widget)
                 elif (v["module"] == "artiq.coredevice.ad9910"
                         and v["class"] == "AD9910") or \
@@ -368,7 +403,11 @@ def setup_from_ddb(ddb):
                     dds_cpld = v["arguments"]["cpld_device"]
                     spi_dev = ddb[dds_cpld]["arguments"]["spi_device"]
                     bus_channel = ddb[spi_dev]["arguments"]["channel"]
-                    widget = _WidgetDesc(k, comment, _DDSWidget, (k, bus_channel, channel, dds_cpld))
+                    pll = v["arguments"]["pll_n"]
+                    refclk = ddb[dds_cpld]["arguments"]["refclk"]
+                    clk_div = v["arguments"].get("clk_div", 0)
+                    model = _DDSModel(v["class"], refclk, dds_cpld, pll, clk_div)
+                    widget = _WidgetDesc(k, comment, _DDSWidget, (k, bus_channel, channel, model))
                     description.add(widget)
                 elif ( (v["module"] == "artiq.coredevice.ad53xx" and v["class"] == "AD53xx")
                         or (v["module"] == "artiq.coredevice.zotino" and v["class"] == "Zotino")):
@@ -385,7 +424,7 @@ def setup_from_ddb(ddb):
             mi_port = v.get("port_proxy", 1383)
         except KeyError:
             pass
-    return mi_addr, mi_port, dds_sysclk, description
+    return mi_addr, mi_port, description
 
 
 class _DeviceManager:
@@ -415,15 +454,13 @@ class _DeviceManager:
         return ddb
 
     def notify(self, mod):
-        mi_addr, mi_port, dds_sysclk, description = setup_from_ddb(self.ddb)
+        mi_addr, mi_port, description = setup_from_ddb(self.ddb)
 
         if (mi_addr, mi_port) != (self.mi_addr, self.mi_port):
            self.mi_addr = mi_addr
            self.mi_port = mi_port
            self.reconnect_mi.set()
 
-        self.dds_sysclk = dds_sysclk
-
         for to_remove in self.description - description:
             widget = self.widgets_by_uid[to_remove.uid]
             del self.widgets_by_uid[to_remove.uid]
@@ -512,24 +549,25 @@ class _DeviceManager:
                                       scheduling["flush"])
         logger.info("Submitted '%s', RID is %d", title, rid)
 
-    def dds_set_frequency(self, dds_channel, dds_cpld, freq):
+    def dds_set_frequency(self, dds_channel, dds_model, freq):
         # create kernel and fill it in and send-by-content
-        if dds_cpld:
+        if dds_model.is_urukul:
             # urukuls need CPLD init and switch to on
             # keep previous config if it was set already
             cpld_dev = """self.setattr_device("core_cache")
-                self.setattr_device("{}")""".format(dds_cpld)
+                self.setattr_device("{}")""".format(dds_model.cpld)
             cpld_init = """cfg = self.core_cache.get("_{cpld}_cfg")
                 if len(cfg) > 0:
                     self.{cpld}.cfg_reg = cfg[0]
                 else:
+                    delay(15*ms)
                     self.{cpld}.init()
                     self.core_cache.put("_{cpld}_cfg", [self.{cpld}.cfg_reg])
                     cfg = self.core_cache.get("_{cpld}_cfg")
-            """.format(cpld=dds_cpld)
+            """.format(cpld=dds_model.cpld)
             cfg_sw = """self.{}.cfg_sw(True)
                 cfg[0] = self.{}.cfg_reg
-            """.format(dds_channel, dds_cpld)
+            """.format(dds_channel, dds_model.cpld)
         else:
             cpld_dev = ""
             cpld_init = ""
@@ -545,9 +583,9 @@ class _DeviceManager:
 
             @kernel
             def run(self):
-                self.core.break_realtime()
-                delay(2*ms)
+                self.core.reset()
                 {cpld_init}
+                delay(5*ms)
                 self.{dds_channel}.init()
                 self.{dds_channel}.set({freq})
                 {cfg_sw}
@@ -560,7 +598,7 @@ class _DeviceManager:
                 "SetDDS",
                 "Set DDS {} {}MHz".format(dds_channel, freq/1e6)))
 
-    def dds_channel_toggle(self, dds_channel, dds_cpld, sw=True):
+    def dds_channel_toggle(self, dds_channel, dds_model, sw=True):
         # urukul only
         toggle_exp = textwrap.dedent("""
             from artiq.experiment import *
@@ -574,19 +612,20 @@ class _DeviceManager:
 
            @kernel
            def run(self):
-                self.core.break_realtime()
-                delay(2*ms)
+                self.core.reset()
                cfg = self.core_cache.get("_{cpld}_cfg")
                if len(cfg) > 0:
                    self.{cpld}.cfg_reg = cfg[0]
                else:
+                    delay(15*ms)
                    self.{cpld}.init()
                    self.core_cache.put("_{cpld}_cfg", [self.{cpld}.cfg_reg])
                    cfg = self.core_cache.get("_{cpld}_cfg")
+                delay(5*ms)
                self.{ch}.init()
                self.{ch}.cfg_sw({sw})
                cfg[0] = self.{cpld}.cfg_reg
-            """.format(ch=dds_channel, cpld=dds_cpld, sw=sw))
+            """.format(ch=dds_channel, cpld=dds_model.cpld, sw=sw))
         asyncio.ensure_future(
             self._submit_by_content(
                 toggle_exp,
@@ -619,11 +658,11 @@ class _DeviceManager:
             elif probe == TTLProbe.oe.value:
                 widget.cur_oe = bool(value)
             widget.refresh_display()
-        if (channel, probe) in self.dds_widgets:
+        elif (channel, probe) in self.dds_widgets:
             widget = self.dds_widgets[(channel, probe)]
-            widget.cur_frequency = value*self.dds_sysclk/2**32
+            widget.dds_model.monitor_update(probe, value)
             widget.refresh_display()
-        if (channel, probe) in self.dac_widgets:
+        elif (channel, probe) in self.dac_widgets:
             widget = self.dac_widgets[(channel, probe)]
             widget.cur_value = value
             widget.refresh_display()
@@ -660,6 +699,8 @@ class _DeviceManager:
                 await asyncio.sleep(10.)
                 self.reconnect_mi.set()
             else:
+                logger.info("ARTIQ dashboard connected to moninj proxy (%s)",
+                            self.mi_addr)
                 self.mi_connection = new_mi_connection
                 for ttl_channel in self.ttl_widgets.keys():
                     self.setup_ttl_monitoring(True, ttl_channel)
@@ -6,6 +6,9 @@ use board_misoc::clock;
 use board_artiq::drtio_routing;
 use sched::Io;
 use sched::Mutex;
+const ASYNC_ERROR_COLLISION: u8 = 1 << 0;
+const ASYNC_ERROR_BUSY: u8 = 1 << 1;
+const ASYNC_ERROR_SEQUENCE_ERROR: u8 = 1 << 2;
 
 #[cfg(has_drtio)]
 pub mod drtio {
@@ -211,12 +214,18 @@ pub mod drtio {
                 Ok(drtioaux::Packet::DestinationDownReply) =>
                     destination_set_up(routing_table, up_destinations, destination, false),
                 Ok(drtioaux::Packet::DestinationOkReply) => (),
-                Ok(drtioaux::Packet::DestinationSequenceErrorReply { channel }) =>
-                    error!("[DEST#{}] RTIO sequence error involving channel 0x{:04x}", destination, channel),
-                Ok(drtioaux::Packet::DestinationCollisionReply { channel }) =>
-                    error!("[DEST#{}] RTIO collision involving channel 0x{:04x}", destination, channel),
-                Ok(drtioaux::Packet::DestinationBusyReply { channel }) =>
-                    error!("[DEST#{}] RTIO busy error involving channel 0x{:04x}", destination, channel),
+                Ok(drtioaux::Packet::DestinationSequenceErrorReply { channel }) => {
+                    error!("[DEST#{}] RTIO sequence error involving channel 0x{:04x}", destination, channel);
+                    unsafe { SEEN_ASYNC_ERRORS |= ASYNC_ERROR_SEQUENCE_ERROR };
+                }
+                Ok(drtioaux::Packet::DestinationCollisionReply { channel }) => {
+                    error!("[DEST#{}] RTIO collision involving channel 0x{:04x}", destination, channel);
+                    unsafe { SEEN_ASYNC_ERRORS |= ASYNC_ERROR_COLLISION };
+                }
+                Ok(drtioaux::Packet::DestinationBusyReply { channel }) => {
+                    error!("[DEST#{}] RTIO busy error involving channel 0x{:04x}", destination, channel);
+                    unsafe { SEEN_ASYNC_ERRORS |= ASYNC_ERROR_BUSY };
+                }
                 Ok(packet) => error!("[DEST#{}] received unexpected aux packet: {:?}", destination, packet),
                 Err(e) => error!("[DEST#{}] communication failed ({})", destination, e)
             }
@@ -339,15 +348,15 @@ fn async_error_thread(io: Io) {
         unsafe {
             io.until(|| csr::rtio_core::async_error_read() != 0).unwrap();
             let errors = csr::rtio_core::async_error_read();
-            if errors & 1 != 0 {
+            if errors & ASYNC_ERROR_COLLISION != 0 {
                 error!("RTIO collision involving channel {}",
                        csr::rtio_core::collision_channel_read());
             }
-            if errors & 2 != 0 {
+            if errors & ASYNC_ERROR_BUSY != 0 {
                 error!("RTIO busy error involving channel {}",
                        csr::rtio_core::busy_channel_read());
             }
-            if errors & 4 != 0 {
+            if errors & ASYNC_ERROR_SEQUENCE_ERROR != 0 {
                 error!("RTIO sequence error involving channel {}",
                        csr::rtio_core::sequence_error_channel_read());
             }
@@ -10,9 +10,11 @@ from sipyco.pc_rpc import Server
 from sipyco import common_args
 from sipyco.logging_tools import log_with_name
 from sipyco.asyncio_tools import SignalHandler
+from sipyco.keepalive import async_open_connection
 
 from artiq.coredevice.comm_mgmt import Request, Reply
 
+logger = logging.getLogger(__name__)
 
 def get_argparser():
     parser = argparse.ArgumentParser(
@@ -38,7 +40,14 @@ async def get_logs_sim(host):
 
 
 async def get_logs(host):
-    reader, writer = await asyncio.open_connection(host, 1380)
+    try:
+        reader, writer = await async_open_connection(
+            host,
+            1380,
+            after_idle=1,
+            interval=1,
+            max_fails=3,
+        )
         writer.write(b"ARTIQ management\n")
         endian = await reader.readexactly(1)
         if endian == b"e":
@@ -70,6 +79,10 @@ async def get_logs(host):
                     name = 'firmware.' + m.group(2).replace('::', '.')
                     text = m.group(3)
                     log_with_name(name, level, text)
+    except asyncio.CancelledError:
+        raise
+    except:
+        logger.error("Logging connection terminating with exception", exc_info=True)
 
 
 def main():
@@ -89,17 +102,15 @@ def main():
         loop.run_until_complete(server.start(common_args.bind_address_from_args(args), args.port))
         try:
             _, pending = loop.run_until_complete(asyncio.wait(
-                [signal_handler.wait_terminate(), server.wait_terminate()],
+                [signal_handler.wait_terminate(),
+                 server.wait_terminate(),
+                 get_logs_task],
                 return_when=asyncio.FIRST_COMPLETED))
             for task in pending:
                 task.cancel()
         finally:
             loop.run_until_complete(server.stop())
     finally:
-        get_logs_task.cancel()
-        try:
-            loop.run_until_complete(get_logs_task)
-        except asyncio.CancelledError:
            pass
     finally:
        signal_handler.teardown()
@@ -116,6 +116,9 @@ class MonitorMux:
         else:
             raise ValueError
 
+    def disconnect_cb(self):
+        self.listeners.clear()
+
 
 class ProxyConnection:
     def __init__(self, monitor_mux, reader, writer):
@@ -203,7 +206,9 @@ def main():
     signal_handler.setup()
     try:
         monitor_mux = MonitorMux()
-        comm_moninj = CommMonInj(monitor_mux.monitor_cb, monitor_mux.injection_status_cb)
+        comm_moninj = CommMonInj(monitor_mux.monitor_cb,
+                                 monitor_mux.injection_status_cb,
+                                 monitor_mux.disconnect_cb)
         monitor_mux.comm_moninj = comm_moninj
         loop.run_until_complete(comm_moninj.connect(args.core_addr))
         try:
@@ -214,7 +219,9 @@ def main():
             loop.run_until_complete(server.start(bind_address, args.port_control))
             try:
                 _, pending = loop.run_until_complete(asyncio.wait(
-                    [signal_handler.wait_terminate(), server.wait_terminate()],
+                    [signal_handler.wait_terminate(),
+                     server.wait_terminate(),
+                     comm_moninj.wait_terminate()],
                     return_when=asyncio.FIRST_COMPLETED))
                 for task in pending:
                     task.cancel()
@@ -3,6 +3,7 @@
 import argparse
 import asyncio
 import atexit
+import importlib
 import os
 import logging
 import sys
@@ -43,6 +44,9 @@ def get_argparser():
     parser.add_argument(
         "--db-file", default=None,
         help="database file for local GUI settings")
+    parser.add_argument(
+        "-p", "--load-plugin", dest="plugin_modules", action="append",
+        help="Python module to load on startup")
     common_args.verbosity_args(parser)
     return parser
 
@@ -95,6 +99,11 @@ def main():
     args = get_argparser().parse_args()
     widget_log_handler = log.init_log(args, "dashboard")
 
+    # load any plugin modules first (to register argument_ui classes, etc.)
+    if args.plugin_modules:
+        for mod in args.plugin_modules:
+            importlib.import_module(mod)
+
     if args.db_file is None:
         args.db_file = os.path.join(get_user_config_dir(),
                                     "artiq_dashboard_{server}_{port}.pyon".format(
@@ -160,6 +169,7 @@ def main():
 
     # create UI components
     expmgr = experiments.ExperimentManager(main_window,
+                                           sub_clients["datasets"],
                                            sub_clients["explist"],
                                            sub_clients["schedule"],
                                            rpc_clients["schedule"],
@@ -179,7 +189,13 @@ def main():
                                              rpc_clients["dataset_db"])
     smgr.register(d_datasets)
 
-    d_applets = applets_ccb.AppletsCCBDock(main_window, sub_clients["datasets"])
+    d_applets = applets_ccb.AppletsCCBDock(main_window,
+                                           sub_clients["datasets"],
+                                           extra_substitutes={
+                                               "server": args.server,
+                                               "port_notify": args.port_notify,
+                                               "port_control": args.port_control,
+                                           })
     atexit_register_coroutine(d_applets.stop)
     smgr.register(d_applets)
     broadcast_clients["ccb"].notify_cbs.append(d_applets.ccb_notify)
@@ -232,9 +248,9 @@ def main():
         server_description = server_name + " ({})".format(args.server)
     else:
         server_description = args.server
-    logging.info("ARTIQ dashboard %s connected to %s",
-                 artiq_version, server_description)
+    logging.info("ARTIQ dashboard version: %s",
+                 artiq_version)
+    logging.info("ARTIQ dashboard connected to moninj_proxy (%s)", server_description)
     # run
     main_window.show()
     loop.run_until_complete(main_window.exit_request.wait())
@@ -128,6 +128,7 @@ def main():
         "scheduler_request_termination": scheduler.request_termination,
         "scheduler_get_status": scheduler.get_status,
         "scheduler_check_pause": scheduler.check_pause,
+        "scheduler_check_termination": scheduler.check_termination,
         "ccb_issue": ccb_issue,
     })
     experiment_db.scan_repository_async()
@ -3,7 +3,7 @@ from migen.build.generic_platform import *
|
||||||
from migen.genlib.io import DifferentialOutput
|
from migen.genlib.io import DifferentialOutput
|
||||||
|
|
||||||
from artiq.gateware import rtio
|
from artiq.gateware import rtio
|
||||||
from artiq.gateware.rtio.phy import spi2, ad53xx_monitor, grabber
|
from artiq.gateware.rtio.phy import spi2, ad53xx_monitor, dds, grabber
|
||||||
from artiq.gateware.suservo import servo, pads as servo_pads
|
from artiq.gateware.suservo import servo, pads as servo_pads
|
||||||
from artiq.gateware.rtio.phy import servo as rtservo, fastino, phaser
|
from artiq.gateware.rtio.phy import servo as rtservo, fastino, phaser
|
||||||
|
|
||||||
|
@ -222,13 +222,13 @@ class Urukul(_EEM):
|
||||||
return ios
|
return ios
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def add_std(cls, target, eem, eem_aux, ttl_out_cls, sync_gen_cls=None, iostandard=default_iostandard):
|
def add_std(cls, target, eem, eem_aux, ttl_out_cls, dds_type, sync_gen_cls=None, iostandard=default_iostandard):
|
||||||
cls.add_extension(target, eem, eem_aux, iostandard=iostandard)
|
cls.add_extension(target, eem, eem_aux, iostandard=iostandard)
|
||||||
|
|
||||||
phy = spi2.SPIMaster(target.platform.request("urukul{}_spi_p".format(eem)),
|
spi_phy = spi2.SPIMaster(target.platform.request("urukul{}_spi_p".format(eem)),
|
||||||
target.platform.request("urukul{}_spi_n".format(eem)))
|
target.platform.request("urukul{}_spi_n".format(eem)))
|
||||||
target.submodules += phy
|
target.submodules += spi_phy
|
||||||
target.rtio_channels.append(rtio.Channel.from_phy(phy, ififo_depth=4))
|
target.rtio_channels.append(rtio.Channel.from_phy(spi_phy, ififo_depth=4))
|
||||||
|
|
||||||
pads = target.platform.request("urukul{}_dds_reset_sync_in".format(eem))
|
pads = target.platform.request("urukul{}_dds_reset_sync_in".format(eem))
|
||||||
if sync_gen_cls is not None: # AD9910 variant and SYNC_IN from EEM
|
if sync_gen_cls is not None: # AD9910 variant and SYNC_IN from EEM
|
||||||
|
@ -237,9 +237,14 @@ class Urukul(_EEM):
|
||||||
target.rtio_channels.append(rtio.Channel.from_phy(phy))
|
target.rtio_channels.append(rtio.Channel.from_phy(phy))
|
||||||
|
|
||||||
pads = target.platform.request("urukul{}_io_update".format(eem))
|
pads = target.platform.request("urukul{}_io_update".format(eem))
|
||||||
phy = ttl_out_cls(pads.p, pads.n)
|
io_upd_phy = ttl_out_cls(pads.p, pads.n)
|
||||||
target.submodules += phy
|
target.submodules += io_upd_phy
|
||||||
target.rtio_channels.append(rtio.Channel.from_phy(phy))
|
target.rtio_channels.append(rtio.Channel.from_phy(io_upd_phy))
|
||||||
|
|
||||||
|
dds_monitor = dds.UrukulMonitor(spi_phy, io_upd_phy, dds_type)
|
||||||
|
target.submodules += dds_monitor
|
||||||
|
spi_phy.probes.extend(dds_monitor.probes)
|
||||||
|
|
||||||
if eem_aux is not None:
|
if eem_aux is not None:
|
||||||
for signal in "sw0 sw1 sw2 sw3".split():
|
for signal in "sw0 sw1 sw2 sw3".split():
|
||||||
pads = target.platform.request("urukul{}_{}".format(eem, signal))
|
pads = target.platform.request("urukul{}_{}".format(eem, signal))
|
||||||
|
@ -247,6 +252,7 @@ class Urukul(_EEM):
|
||||||
target.submodules += phy
|
target.submodules += phy
|
||||||
target.rtio_channels.append(rtio.Channel.from_phy(phy))
|
target.rtio_channels.append(rtio.Channel.from_phy(phy))
|
||||||
|
|
||||||
|
|
||||||
class Sampler(_EEM):
|
class Sampler(_EEM):
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def io(eem, eem_aux, iostandard):
|
def io(eem, eem_aux, iostandard):
|
||||||
|
|
|
@ -47,7 +47,7 @@ def peripheral_urukul(module, peripheral, **kwargs):
|
||||||
else:
|
else:
|
||||||
sync_gen_cls = None
|
sync_gen_cls = None
|
||||||
eem.Urukul.add_std(module, port, port_aux, ttl_serdes_7series.Output_8X,
|
eem.Urukul.add_std(module, port, port_aux, ttl_serdes_7series.Output_8X,
|
||||||
sync_gen_cls, **kwargs)
|
peripheral["dds"], sync_gen_cls, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
def peripheral_novogorny(module, peripheral, **kwargs):
|
def peripheral_novogorny(module, peripheral, **kwargs):
|
||||||
|
|
|
@ -3,6 +3,11 @@ from migen import *
|
||||||
from artiq.gateware import ad9_dds
|
from artiq.gateware import ad9_dds
|
||||||
from artiq.gateware.rtio.phy.wishbone import RT2WB
|
from artiq.gateware.rtio.phy.wishbone import RT2WB
|
||||||
|
|
||||||
|
from artiq.coredevice.spi2 import SPI_CONFIG_ADDR, SPI_DATA_ADDR, SPI_END
|
||||||
|
from artiq.coredevice.urukul import CS_DDS_CH0, CS_DDS_MULTI, CFG_IO_UPDATE, CS_CFG
|
||||||
|
|
||||||
|
from artiq.coredevice.ad9912_reg import AD9912_POW1
|
||||||
|
from artiq.coredevice.ad9910 import _AD9910_REG_PROFILE0, _AD9910_REG_PROFILE7, _AD9910_REG_FTW
|
||||||
|
|
||||||
class AD9914(Module):
|
class AD9914(Module):
|
||||||
def __init__(self, pads, nchannels, onehot=False, **kwargs):
|
def __init__(self, pads, nchannels, onehot=False, **kwargs):
|
||||||
|
@ -54,3 +59,121 @@ class AD9914(Module):
|
||||||
self.sync.rio_phy += If(current_address == 2**len(pads.a), [
|
self.sync.rio_phy += If(current_address == 2**len(pads.a), [
|
||||||
If(selected(c), probe.eq(ftw))
|
If(selected(c), probe.eq(ftw))
|
||||||
for c, (probe, ftw) in enumerate(zip(self.probes, ftws))])
|
for c, (probe, ftw) in enumerate(zip(self.probes, ftws))])
|
||||||
|
|
||||||
|
|
||||||
|
class UrukulMonitor(Module):
|
||||||
|
def __init__(self, spi_phy, io_update_phy, dds, nchannels=4):
|
||||||
|
self.spi_phy = spi_phy
|
||||||
|
self.io_update_phy = io_update_phy
|
||||||
|
|
||||||
|
self.probes = [Signal(32) for i in range(nchannels)]
|
||||||
|
|
||||||
|
self.cs = Signal(8)
|
||||||
|
self.current_data = Signal.like(self.spi_phy.rtlink.o.data)
|
||||||
|
current_address = Signal.like(self.spi_phy.rtlink.o.address)
|
||||||
|
data_length = Signal(8)
|
||||||
|
flags = Signal(8)
|
||||||
|
|
||||||
|
self.sync.rio += If(self.spi_phy.rtlink.o.stb, [
|
||||||
|
current_address.eq(self.spi_phy.rtlink.o.address),
|
||||||
|
self.current_data.eq(self.spi_phy.rtlink.o.data),
|
||||||
|
If(self.spi_phy.rtlink.o.address == SPI_CONFIG_ADDR, [
|
||||||
|
self.cs.eq(self.spi_phy.rtlink.o.data[24:]),
|
||||||
|
data_length.eq(self.spi_phy.rtlink.o.data[8:16] + 1),
|
||||||
|
flags.eq(self.spi_phy.rtlink.o.data[0:8])
|
||||||
|
])
|
||||||
|
])
|
||||||
|
|
||||||
|
for i in range(nchannels):
|
||||||
|
ch_sel = Signal()
|
||||||
|
self.comb += ch_sel.eq(
|
||||||
|
((self.cs == CS_DDS_MULTI) | (self.cs == i + CS_DDS_CH0))
|
||||||
|
& (current_address == SPI_DATA_ADDR)
|
||||||
|
)
|
||||||
|
|
||||||
|
if dds == "ad9912":
|
||||||
|
mon_cls = _AD9912Monitor
|
||||||
|
elif dds == "ad9910":
|
||||||
|
mon_cls = _AD9910Monitor
|
||||||
|
else:
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
monitor = mon_cls(self.current_data, data_length, flags, ch_sel)
|
||||||
|
self.submodules += monitor
|
||||||
|
|
||||||
|
self.sync.rio_phy += [
|
||||||
|
If(ch_sel & self.is_io_update(), self.probes[i].eq(monitor.ftw))
|
||||||
|
]
|
||||||
|
|
||||||
|
def is_io_update(self):
|
||||||
|
# shifted 8 bits left for 32-bit bus
|
||||||
|
reg_io_upd = (self.cs == CS_CFG) & self.current_data[8 + CFG_IO_UPDATE]
|
||||||
|
phy_io_upd = False
|
||||||
|
if self.io_update_phy:
|
||||||
|
phy_io_upd = self.io_update_phy.rtlink.o.stb & self.io_update_phy.rtlink.o.data
|
||||||
|
return phy_io_upd | reg_io_upd
|
||||||
|
|
||||||
|
|
||||||
|
class _AD9912Monitor(Module):
|
||||||
|
def __init__(self, current_data, data_length, flags, ch_sel):
|
||||||
|
self.ftw = Signal(32, reset_less=True)
|
||||||
|
|
||||||
|
fsm = ClockDomainsRenamer("rio_phy")(FSM(reset_state="IDLE"))
|
||||||
|
self.submodules += fsm
|
||||||
|
|
||||||
|
reg_addr = current_data[16:29]
|
||||||
|
reg_write = ~current_data[31]
|
||||||
|
|
||||||
|
fsm.act("IDLE",
|
||||||
|
If(ch_sel & reg_write,
|
||||||
|
If((data_length == 16) & (reg_addr == AD9912_POW1),
|
||||||
|
NextState("READ")
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
fsm.act("READ",
|
||||||
|
If(ch_sel,
|
||||||
|
If(flags & SPI_END,
|
||||||
|
# lower 16 bits (16-32 from 48-bit transfer)
|
||||||
|
NextValue(self.ftw[:16], current_data[16:]),
|
||||||
|
NextState("IDLE")
|
||||||
|
).Else(
|
||||||
|
NextValue(self.ftw[16:], current_data[:16])
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class _AD9910Monitor(Module):
|
||||||
|
def __init__(self, current_data, data_length, flags, ch_sel):
|
||||||
|
self.ftw = Signal(32, reset_less=True)
|
||||||
|
|
||||||
|
fsm = ClockDomainsRenamer("rio_phy")(FSM(reset_state="IDLE"))
|
||||||
|
self.submodules += fsm
|
||||||
|
|
||||||
|
reg_addr = current_data[24:29]
|
||||||
|
reg_write = ~current_data[31]
|
||||||
|
|
||||||
|
fsm.act("IDLE",
|
||||||
|
If(ch_sel & reg_write,
|
||||||
|
If((data_length == 8) & (_AD9910_REG_PROFILE7 >= reg_addr) & (reg_addr >= _AD9910_REG_PROFILE0),
|
||||||
|
NextState("READ")
|
||||||
|
).Elif(reg_addr == _AD9910_REG_FTW,
|
||||||
|
If((data_length == 24) & (flags & SPI_END),
|
||||||
|
NextValue(self.ftw[:16], current_data[8:24])
|
||||||
|
).Elif(data_length == 8,
|
||||||
|
NextState("READ")
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
fsm.act("READ",
|
||||||
|
If(ch_sel,
|
||||||
|
If(flags & SPI_END,
|
||||||
|
NextValue(self.ftw, current_data),
|
||||||
|
NextState("IDLE")
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
|
@ -25,6 +25,7 @@ class AppletIPCServer(AsyncioParentComm):
|
||||||
AsyncioParentComm.__init__(self)
|
AsyncioParentComm.__init__(self)
|
||||||
self.datasets_sub = datasets_sub
|
self.datasets_sub = datasets_sub
|
||||||
self.datasets = set()
|
self.datasets = set()
|
||||||
|
self.dataset_prefixes = []
|
||||||
|
|
||||||
def write_pyon(self, obj):
|
def write_pyon(self, obj):
|
||||||
self.write(pyon.encode(obj).encode() + b"\n")
|
self.write(pyon.encode(obj).encode() + b"\n")
|
||||||
|
@ -33,8 +34,16 @@ class AppletIPCServer(AsyncioParentComm):
|
||||||
line = await self.readline()
|
line = await self.readline()
|
||||||
return pyon.decode(line.decode())
|
return pyon.decode(line.decode())
|
||||||
|
|
||||||
|
def _is_dataset_subscribed(self, key):
|
||||||
|
if key in self.datasets:
|
||||||
|
return True
|
||||||
|
for prefix in self.dataset_prefixes:
|
||||||
|
if key.startswith(prefix):
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
def _synthesize_init(self, data):
|
def _synthesize_init(self, data):
|
||||||
struct = {k: v for k, v in data.items() if k in self.datasets}
|
struct = {k: v for k, v in data.items() if self._is_dataset_subscribed(k)}
|
||||||
return {"action": "init",
|
return {"action": "init",
|
||||||
"struct": struct}
|
"struct": struct}
|
||||||
|
|
||||||
|
@ -43,10 +52,10 @@ class AppletIPCServer(AsyncioParentComm):
|
||||||
mod = self._synthesize_init(mod["struct"])
|
mod = self._synthesize_init(mod["struct"])
|
||||||
else:
|
else:
|
||||||
if mod["path"]:
|
if mod["path"]:
|
||||||
if mod["path"][0] not in self.datasets:
|
if not self._is_dataset_subscribed(mod["path"][0]):
|
||||||
return
|
return
|
||||||
elif mod["action"] in {"setitem", "delitem"}:
|
elif mod["action"] in {"setitem", "delitem"}:
|
||||||
if mod["key"] not in self.datasets:
|
if not self._is_dataset_subscribed(mod["key"]):
|
||||||
return
|
return
|
||||||
self.write_pyon({"action": "mod", "mod": mod})
|
self.write_pyon({"action": "mod", "mod": mod})
|
||||||
|
|
||||||
|
@@ -64,6 +73,7 @@ class AppletIPCServer(AsyncioParentComm):
                     fix_initial_size_cb()
                 elif action == "subscribe":
                     self.datasets = obj["datasets"]
+                    self.dataset_prefixes = obj["dataset_prefixes"]
                     if self.datasets_sub.model is not None:
                         mod = self._synthesize_init(
                             self.datasets_sub.model.backing_store)
@@ -93,7 +103,7 @@ class AppletIPCServer(AsyncioParentComm):
 
 
 class _AppletDock(QDockWidgetCloseDetect):
-    def __init__(self, datasets_sub, uid, name, spec):
+    def __init__(self, datasets_sub, uid, name, spec, extra_substitutes):
         QDockWidgetCloseDetect.__init__(self, "Applet: " + name)
         self.setObjectName("applet" + str(uid))
 
@@ -104,6 +114,7 @@ class _AppletDock(QDockWidgetCloseDetect):
         self.datasets_sub = datasets_sub
         self.applet_name = name
         self.spec = spec
+        self.extra_substitutes = extra_substitutes
 
         self.starting_stopping = False
 
@@ -152,7 +163,8 @@ class _AppletDock(QDockWidgetCloseDetect):
         python = sys.executable.replace("\\", "\\\\")
         command = command_tpl.safe_substitute(
             python=python,
-            artiq_applet=python + " -m artiq.applets."
+            artiq_applet=python + " -m artiq.applets.",
+            **self.extra_substitutes
         )
         logger.debug("starting command %s for %s", command, self.applet_name)
         await self.start_process(shlex.split(command), None)
@@ -315,7 +327,11 @@ class _CompleterDelegate(QtWidgets.QStyledItemDelegate):
 
 
 class AppletsDock(QtWidgets.QDockWidget):
-    def __init__(self, main_window, datasets_sub):
+    def __init__(self, main_window, datasets_sub, extra_substitutes={}):
+        """
+        :param extra_substitutes: Map of extra ``${strings}`` to substitute in applet
+            commands to their respective values.
+        """
         QtWidgets.QDockWidget.__init__(self, "Applets")
         self.setObjectName("Applets")
         self.setFeatures(QtWidgets.QDockWidget.DockWidgetMovable |
@@ -323,6 +339,7 @@ class AppletsDock(QtWidgets.QDockWidget):
 
         self.main_window = main_window
         self.datasets_sub = datasets_sub
+        self.extra_substitutes = extra_substitutes
         self.applet_uids = set()
 
         self.table = QtWidgets.QTreeWidget()
@@ -421,7 +438,7 @@ class AppletsDock(QtWidgets.QDockWidget):
         self.table.itemChanged.connect(self.item_changed)
 
     def create(self, item, name, spec):
-        dock = _AppletDock(self.datasets_sub, item.applet_uid, name, spec)
+        dock = _AppletDock(self.datasets_sub, item.applet_uid, name, spec, self.extra_substitutes)
         self.main_window.addDockWidget(QtCore.Qt.RightDockWidgetArea, dock)
         dock.setFloating(True)
         asyncio.ensure_future(dock.start())
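With the hunks above, an embedding application can route extra ``${...}`` placeholders into applet command templates. A hedged sketch of how the substitution behaves; the ``server`` key, its value and the command template are made-up examples, not defined by this commit:

# Sketch only: how extra_substitutes ends up in the applet command.
# The ${server} placeholder and the example applet invocation are hypothetical.
import sys
from string import Template

extra_substitutes = {"server": "192.168.1.42"}
command_tpl = Template("${artiq_applet}big_number counts --server ${server}")

python = sys.executable.replace("\\", "\\\\")
command = command_tpl.safe_substitute(
    python=python,
    artiq_applet=python + " -m artiq.applets.",
    **extra_substitutes)
print(command)  # ".../python -m artiq.applets.big_number counts --server 192.168.1.42"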
@@ -19,4 +19,4 @@ _register_unit("Hz", "m_kMG")
 _register_unit("dB", "_")
 _register_unit("V", "um_k")
 _register_unit("A", "um_")
-_register_unit("W", "um_")
+_register_unit("W", "num_")
@@ -30,7 +30,6 @@ class _RepoScanner:
             raise
         for class_name, class_desc in description.items():
             name = class_desc["name"]
-            arginfo = class_desc["arginfo"]
             if "/" in name:
                 logger.warning("Character '/' is not allowed in experiment "
                                "name (%s)", name)
@@ -47,7 +46,8 @@ class _RepoScanner:
             entry = {
                 "file": filename,
                 "class_name": class_name,
-                "arginfo": arginfo,
+                "arginfo": class_desc["arginfo"],
+                "argument_ui": class_desc["argument_ui"],
                 "scheduler_defaults": class_desc["scheduler_defaults"]
             }
             entry_dict[name] = entry
@@ -490,3 +490,13 @@ class Scheduler:
                     return False
                 return r.priority_key() > run.priority_key()
         raise KeyError("RID not found")
+
+    def check_termination(self, rid):
+        """Returns ``True`` if termination is requested."""
+        for pipeline in self._pipelines.values():
+            if rid in pipeline.pool.runs:
+                run = pipeline.pool.runs[rid]
+                if run.termination_requested:
+                    return True
+        return False
@@ -301,8 +301,14 @@ class Worker:
         await self._create_process(logging.WARNING)
         r = dict()
 
-        def register(class_name, name, arginfo, scheduler_defaults):
-            r[class_name] = {"name": name, "arginfo": arginfo, "scheduler_defaults": scheduler_defaults}
+        def register(class_name, name, arginfo, argument_ui,
+                     scheduler_defaults):
+            r[class_name] = {
+                "name": name,
+                "arginfo": arginfo,
+                "argument_ui": argument_ui,
+                "scheduler_defaults": scheduler_defaults
+            }
         self.register_experiment = register
         await self._worker_action({"action": "examine", "file": file},
                                   timeout)
@@ -111,6 +111,12 @@ class Scheduler:
             rid = self.rid
         return self._check_pause(rid)
 
+    _check_termination = staticmethod(make_parent_action("scheduler_check_termination"))
+    def check_termination(self, rid=None) -> TBool:
+        if rid is None:
+            rid = self.rid
+        return self._check_termination(rid)
+
     _submit = staticmethod(make_parent_action("scheduler_submit"))
     def submit(self, pipeline_name=None, expid=None, priority=None, due_date=None, flush=False):
         if pipeline_name is None:
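Together with the master-side Scheduler.check_termination above, this gives experiments a non-blocking way to ask whether a termination request is pending for their RID. A hedged sketch of host-side use; the experiment class and its method names are made up:

# Sketch only: polling the new API from an experiment's host-side run().
from artiq.experiment import EnvExperiment

class LongAcquisition(EnvExperiment):  # hypothetical experiment
    def build(self):
        self.setattr_device("scheduler")

    def run(self):
        while not self.scheduler.check_termination():
            self.acquire_one_batch()  # placeholder for real work

    def acquire_one_batch(self):
        pass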
@@ -205,7 +211,10 @@ def examine(device_mgr, dataset_mgr, file):
                 (k, (proc.describe(), group, tooltip))
                 for k, (proc, group, tooltip) in argument_mgr.requested_args.items()
             )
-            register_experiment(class_name, name, arginfo, scheduler_defaults)
+            argument_ui = None
+            if hasattr(exp_class, "argument_ui"):
+                argument_ui = exp_class.argument_ui
+            register_experiment(class_name, name, arginfo, argument_ui, scheduler_defaults)
     finally:
         new_keys = set(sys.modules.keys())
         for key in new_keys - previous_keys:
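The worker now forwards an optional argument_ui class attribute to the master, so a dashboard plugin can install a custom argument editor for matching experiments. A hedged sketch of the experiment side; the "ndscan" identifier is illustrative and only has an effect if the dashboard has a plugin registered under that name:

# Sketch only: opting an experiment into a custom argument editor UI.
from artiq.experiment import EnvExperiment

class FrequencyScan(EnvExperiment):  # hypothetical experiment
    argument_ui = "ndscan"  # picked up by examine() via hasattr(exp_class, "argument_ui")

    def build(self):
        pass

    def run(self):
        pass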
flake.lock: 12 changed lines
@@ -41,11 +41,11 @@
     },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1653920503,
-        "narHash": "sha256-BBeCZwZImtjP3oYy4WogkQYy5OxNyfNciVSc1AfZgLQ=",
+        "lastModified": 1655278232,
+        "narHash": "sha256-H6s7tnHYiDKFCcLADS4sl1sUq0dDJuRQXCieguk/6SA=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "a634c8f6c1fbf9b9730e01764999666f3436f10a",
+        "rev": "8b538fcb329a7bc3d153962f17c509ee49166973",
         "type": "github"
       },
       "original": {
@@ -73,11 +73,11 @@
         ]
       },
       "locked": {
-        "lastModified": 1654006751,
-        "narHash": "sha256-OWAnoTCutvTQcYdtdtLQuL6uRtG+7Jz7sbRhcScv8bo=",
+        "lastModified": 1654830914,
+        "narHash": "sha256-tratXcWu6Dgzd0Qd9V6EMjuNlE9qDN1pKFhP+Gt0b64=",
         "owner": "m-labs",
         "repo": "sipyco",
-        "rev": "b3d03a94c751a24769c54a61a0dbe9d6af52dade",
+        "rev": "58b0935f7ae47659abee5b5792fa594153328d6f",
         "type": "github"
       },
       "original": {
@@ -467,7 +467,7 @@
       mkdir $HOME/.ssh
       cp /opt/hydra_id_ed25519 $HOME/.ssh/id_ed25519
       cp /opt/hydra_id_ed25519.pub $HOME/.ssh/id_ed25519.pub
-      echo "rpi-1 ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIPOBQVcsvk6WgRj18v4m0zkFeKrcN9gA+r6sxQxNwFpv" > $HOME/.ssh/known_hosts
+      echo "rpi-1 ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIACtBFDVBYoAE4fpJCTANZSE0bcVpTR3uvfNvb80C4i5" > $HOME/.ssh/known_hosts
       chmod 600 $HOME/.ssh/id_ed25519
       LOCKCTL=$(mktemp -d)
       mkfifo $LOCKCTL/lockctl