forked from M-Labs/artiq
Compare commits
No commits in common. "moninj_router_design" and "master" have entirely different histories.
moninj_router_design ... master
@@ -29,7 +29,7 @@ Website: https://m-labs.hk/artiq
 License
 =======
 
-Copyright (C) 2014-2024 M-Labs Limited.
+Copyright (C) 2014-2023 M-Labs Limited.
 
 ARTIQ is free software: you can redistribute it and/or modify
 it under the terms of the GNU Lesser General Public License as published by
@@ -39,19 +39,12 @@ Highlights:
 - Hotkeys now organize experiment windows in the order they were last interacted with:
     + CTRL+SHIFT+T tiles experiment windows
     + CTRL+SHIFT+C cascades experiment windows
-- By enabling the ``quickstyle`` option, ``EnumerationValue`` entry widgets can now alternatively display
-    its choices as buttons that submit the experiment on click.
-* Datasets can now be associated with units and scale factors, and displayed accordingly in the dashboard
-    including applets, like widgets such as ``NumberValue`` already did in earlier ARTIQ versions.
-* Experiments can now request arguments interactively from the user at any time.
 * Persistent datasets are now stored in a LMDB database for improved performance.
 * Python's built-in types (such as ``float``, or ``List[...]``) can now be used in type annotations on
     kernel functions.
-* Full Python 3.11 support.
+* Full Python 3.10 support.
 * MSYS2 packaging for Windows, which replaces Conda. Conda packages are still available to
     support legacy installations, but may be removed in a future release.
-* Experiments can now be submitted with revisions set to a branch / tag name instead of only git hashes.
-* Grabber image input now has an optional timeout.
 
 Breaking changes:
 
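The interactive-argument highlight removed above corresponds to the ``interactive()`` context manager. A minimal sketch of how an experiment might use it (a hedged example; the title and argument name are illustrative):

from artiq.experiment import EnvExperiment, NumberValue


class InteractiveDemo(EnvExperiment):
    def build(self):
        pass

    def run(self):
        # Pause mid-run and ask the operator for a value through the dashboard.
        with self.interactive(title="Pulse amplitude") as inter:
            inter.setattr_argument("amplitude", NumberValue(0.5, min=0.0, max=1.0))
        print("amplitude =", inter.amplitude)
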
@@ -42,13 +42,13 @@ class _AppletRequestInterface:
         """
         raise NotImplementedError
 
-    def set_argument_value(self, expurl, key, value):
+    def set_argument_value(self, expurl, name, value):
         """
         Temporarily set the value of an argument in a experiment in the dashboard.
         The value resets to default value when recomputing the argument.
 
         :param expurl: Experiment URL identifying the experiment in the dashboard. Example: 'repo:ArgumentsDemo'.
-        :param key: Name of the argument in the experiment.
+        :param name: Name of the argument in the experiment.
         :param value: Object representing the new temporary value of the argument. For ``Scannable`` arguments, this parameter
             should be a ``ScanObject``. The type of the ``ScanObject`` will be set as the selected type when this function is called.
         """
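For context, a sketch of how an applet might call the request interface whose parameter is renamed above. This is a hedged example: the widget, the PyQt5 import, and names such as ``repo:ArgumentsDemo`` and ``count`` are illustrative, and the constructor/``data_changed`` signatures follow the newer applet API.

from PyQt5 import QtWidgets

from artiq.applets.simple import SimpleApplet


class SetArgumentButton(QtWidgets.QPushButton):
    def __init__(self, args, req):
        QtWidgets.QPushButton.__init__(self, "Set count to 10")
        # Push a temporary value into the "count" argument of the
        # ArgumentsDemo window currently open in the dashboard.
        self.clicked.connect(
            lambda: req.set_argument_value("repo:ArgumentsDemo", "count", 10))

    def data_changed(self, value, metadata, persist, mods):
        pass


def main():
    SimpleApplet(SetArgumentButton).run()


if __name__ == "__main__":
    main()
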
@@ -77,10 +77,10 @@ class AppletRequestIPC(_AppletRequestInterface):
         mod = {"action": "append", "path": [key, 1], "x": value}
         self.ipc.update_dataset(mod)
 
-    def set_argument_value(self, expurl, key, value):
+    def set_argument_value(self, expurl, name, value):
         if isinstance(value, ScanObject):
             value = value.describe()
-        self.ipc.set_argument_value(expurl, key, value)
+        self.ipc.set_argument_value(expurl, name, value)
 
 
 class AppletRequestRPC(_AppletRequestInterface):
@@ -182,10 +182,10 @@ class AppletIPCClient(AsyncioChildComm):
         self.write_pyon({"action": "update_dataset",
                          "mod": mod})
 
-    def set_argument_value(self, expurl, key, value):
+    def set_argument_value(self, expurl, name, value):
         self.write_pyon({"action": "set_argument_value",
                          "expurl": expurl,
-                         "key": key,
+                         "name": name,
                          "value": value})
 
 
@@ -255,7 +255,7 @@ class SimpleApplet:
         if self.embed is None:
             dataset_ctl = RPCClient()
             self.loop.run_until_complete(dataset_ctl.connect_rpc(
-                self.args.server, self.args.port_control, "dataset_db"))
+                self.args.server, self.args.port_control, "master_dataset_db"))
             self.req = AppletRequestRPC(self.loop, dataset_ctl)
         else:
             self.req = AppletRequestIPC(self.ipc)
@@ -33,7 +33,7 @@ class DatasetCtl:
         try:
             remote = RPCClient()
             await remote.connect_rpc(self.master_host, self.master_port,
                                      "master_dataset_db")
-                                     "dataset_db")
+                                     "master_dataset_db")
             try:
                 if op_name == "set":
                     await remote.set(key_or_mod, value, persist, metadata)
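The RPC target rename above ("dataset_db" vs. "master_dataset_db") also affects external clients that talk to the master directly. A hedged sketch with sipyco, assuming the master's default control port and an illustrative dataset key; the target name must match the master version in use:

from sipyco.pc_rpc import Client

# Connect to the dataset database exposed by artiq_master on its control port.
dataset_db = Client("::1", 3251, "master_dataset_db")
try:
    dataset_db.set("scan.count", 100, persist=True)
    print(dataset_db.get("scan.count"))
finally:
    dataset_db.close_rpc()
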
@@ -10,26 +10,87 @@ import h5py
 from sipyco import pyon
 
 from artiq import __artiq_dir__ as artiq_dir
-from artiq.gui.tools import (LayoutWidget, log_level_to_name, get_open_file_name)
-from artiq.gui.entries import procdesc_to_entry, EntryTreeWidget
+from artiq.gui.tools import (LayoutWidget, WheelFilter,
+                             log_level_to_name, get_open_file_name)
+from artiq.gui.entries import procdesc_to_entry
 from artiq.master.worker import Worker, log_worker_exception
 
 logger = logging.getLogger(__name__)
 
 
-class _ArgumentEditor(EntryTreeWidget):
+class _ArgumentEditor(QtWidgets.QTreeWidget):
     def __init__(self, dock):
-        EntryTreeWidget.__init__(self)
+        QtWidgets.QTreeWidget.__init__(self)
+        self.setColumnCount(3)
+        self.header().setStretchLastSection(False)
+        try:
+            set_resize_mode = self.header().setSectionResizeMode
+        except AttributeError:
+            set_resize_mode = self.header().setResizeMode
+        set_resize_mode(0, QtWidgets.QHeaderView.ResizeToContents)
+        set_resize_mode(1, QtWidgets.QHeaderView.Stretch)
+        set_resize_mode(2, QtWidgets.QHeaderView.ResizeToContents)
+        self.header().setVisible(False)
+        self.setSelectionMode(self.NoSelection)
+        self.setHorizontalScrollMode(self.ScrollPerPixel)
+        self.setVerticalScrollMode(self.ScrollPerPixel)
+
+        self.setStyleSheet("QTreeWidget {background: " +
+                           self.palette().midlight().color().name() + " ;}")
+
+        self.viewport().installEventFilter(WheelFilter(self.viewport(), True))
+
+        self._groups = dict()
+        self._arg_to_widgets = dict()
         self._dock = dock
 
         if not self._dock.arguments:
-            self.insertTopLevelItem(0, QtWidgets.QTreeWidgetItem(["No arguments"]))
+            self.addTopLevelItem(QtWidgets.QTreeWidgetItem(["No arguments"]))
+        gradient = QtGui.QLinearGradient(
+            0, 0, 0, QtGui.QFontMetrics(self.font()).lineSpacing()*2.5)
+        gradient.setColorAt(0, self.palette().base().color())
+        gradient.setColorAt(1, self.palette().midlight().color())
 
         for name, argument in self._dock.arguments.items():
-            self.set_argument(name, argument)
-
-        self.quickStyleClicked.connect(self._dock._run_clicked)
+            widgets = dict()
+            self._arg_to_widgets[name] = widgets
+
+            entry = procdesc_to_entry(argument["desc"])(argument)
+            widget_item = QtWidgets.QTreeWidgetItem([name])
+            if argument["tooltip"]:
+                widget_item.setToolTip(0, argument["tooltip"])
+            widgets["entry"] = entry
+            widgets["widget_item"] = widget_item
+
+            for col in range(3):
+                widget_item.setBackground(col, gradient)
+            font = widget_item.font(0)
+            font.setBold(True)
+            widget_item.setFont(0, font)
+
+            if argument["group"] is None:
+                self.addTopLevelItem(widget_item)
+            else:
+                self._get_group(argument["group"]).addChild(widget_item)
+            fix_layout = LayoutWidget()
+            widgets["fix_layout"] = fix_layout
+            fix_layout.addWidget(entry)
+            self.setItemWidget(widget_item, 1, fix_layout)
+
+            recompute_argument = QtWidgets.QToolButton()
+            recompute_argument.setToolTip("Re-run the experiment's build "
+                                          "method and take the default value")
+            recompute_argument.setIcon(
+                QtWidgets.QApplication.style().standardIcon(
+                    QtWidgets.QStyle.SP_BrowserReload))
+            recompute_argument.clicked.connect(
+                partial(self._recompute_argument_clicked, name))
+            fix_layout = LayoutWidget()
+            fix_layout.addWidget(recompute_argument)
+            self.setItemWidget(widget_item, 2, fix_layout)
 
+        widget_item = QtWidgets.QTreeWidgetItem()
+        self.addTopLevelItem(widget_item)
         recompute_arguments = QtWidgets.QPushButton("Recompute all arguments")
         recompute_arguments.setIcon(
             QtWidgets.QApplication.style().standardIcon(
@@ -39,7 +100,7 @@ class _ArgumentEditor(EntryTreeWidget):
         load = QtWidgets.QPushButton("Set arguments from HDF5")
         load.setToolTip("Set arguments from currently selected HDF5 file")
         load.setIcon(QtWidgets.QApplication.style().standardIcon(
             QtWidgets.QStyle.SP_DialogApplyButton))
         load.clicked.connect(self._load_clicked)
 
         buttons = LayoutWidget()
@@ -47,7 +108,21 @@ class _ArgumentEditor(EntryTreeWidget):
         buttons.addWidget(load, 1, 2)
         for i, s in enumerate((1, 0, 0, 1)):
             buttons.layout.setColumnStretch(i, s)
-        self.setItemWidget(self.bottom_item, 1, buttons)
+        self.setItemWidget(widget_item, 1, buttons)
+
+    def _get_group(self, name):
+        if name in self._groups:
+            return self._groups[name]
+        group = QtWidgets.QTreeWidgetItem([name])
+        for col in range(3):
+            group.setBackground(col, self.palette().mid())
+            group.setForeground(col, self.palette().brightText())
+            font = group.font(col)
+            font.setBold(True)
+            group.setFont(col, font)
+        self.addTopLevelItem(group)
+        self._groups[name] = group
+        return group
 
     def _load_clicked(self):
         asyncio.ensure_future(self._dock.load_hdf5_task())
@@ -55,8 +130,8 @@ class _ArgumentEditor(EntryTreeWidget):
     def _recompute_arguments_clicked(self):
         asyncio.ensure_future(self._dock._recompute_arguments())
 
-    def reset_entry(self, key):
-        asyncio.ensure_future(self._recompute_argument(key))
+    def _recompute_argument_clicked(self, name):
+        asyncio.ensure_future(self._recompute_argument(name))
 
     async def _recompute_argument(self, name):
         try:
@@ -71,7 +146,29 @@ class _ArgumentEditor(EntryTreeWidget):
         state = procdesc_to_entry(procdesc).default_state(procdesc)
         argument["desc"] = procdesc
         argument["state"] = state
-        self.update_argument(name, argument)
+        widgets = self._arg_to_widgets[name]
+
+        widgets["entry"].deleteLater()
+        widgets["entry"] = procdesc_to_entry(procdesc)(argument)
+        widgets["fix_layout"] = LayoutWidget()
+        widgets["fix_layout"].addWidget(widgets["entry"])
+        self.setItemWidget(widgets["widget_item"], 1, widgets["fix_layout"])
+        self.updateGeometries()
+
+    def save_state(self):
+        expanded = []
+        for k, v in self._groups.items():
+            if v.isExpanded():
+                expanded.append(k)
+        return {"expanded": expanded}
+
+    def restore_state(self, state):
+        for e in state["expanded"]:
+            try:
+                self._groups[e].setExpanded(True)
+            except KeyError:
+                pass
 
 
 log_levels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]
@@ -180,8 +277,8 @@ class _ExperimentDock(QtWidgets.QMdiSubWindow):
         state = self.argeditor.save_state()
         self.argeditor.deleteLater()
         self.argeditor = _ArgumentEditor(self)
-        self.layout.addWidget(self.argeditor, 0, 0, 1, 5)
         self.argeditor.restore_state(state)
+        self.layout.addWidget(self.argeditor, 0, 0, 1, 5)
 
     async def load_hdf5_task(self, filename=None):
         if filename is None:
@@ -369,8 +466,6 @@ class ExperimentsArea(QtWidgets.QMdiArea):
     def initialize_submission_arguments(self, arginfo):
         arguments = OrderedDict()
         for name, (procdesc, group, tooltip) in arginfo.items():
-            if procdesc["ty"] == "EnumerationValue" and procdesc["quickstyle"]:
-                procdesc["quickstyle"] = False
            state = procdesc_to_entry(procdesc).default_state(procdesc)
             arguments[name] = {
                 "desc": procdesc,
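For reference, a sketch of the ``quickstyle`` option being filtered out above. A hedged example: the argument and choice names are illustrative, and ``quickstyle`` is only honoured by frontends that support it.

from artiq.experiment import EnvExperiment, EnumerationValue


class QuickSubmitDemo(EnvExperiment):
    def build(self):
        # With quickstyle enabled, the dashboard shows the choices as buttons
        # that submit the experiment immediately when clicked.
        self.setattr_argument("mode", EnumerationValue(
            ["calibrate", "measure", "idle"], quickstyle=True))

    def run(self):
        print("running in mode:", self.mode)
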
@@ -253,12 +253,6 @@ def fn_subkernel_await():
 def fn_subkernel_preload():
     return types.TBuiltinFunction("subkernel_preload")
 
-def fn_subkernel_send():
-    return types.TBuiltinFunction("subkernel_send")
-
-def fn_subkernel_recv():
-    return types.TBuiltinFunction("subkernel_recv")
-
 # Accessors
 
 def is_none(typ):
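The ``subkernel_send``/``subkernel_recv`` builtins removed above implement subkernel message passing. A hedged sketch of the intended kernel-side usage, following the call forms listed in the inferencer further down; device names, the destination number, and the message payload are illustrative:

from artiq.experiment import *


@subkernel(destination=1)
def produce():
    # Runs on satellite 1 and sends one value back to the calling kernel.
    subkernel_send(0, "result", 42)


class SubkernelMessageDemo(EnvExperiment):
    def build(self):
        self.setattr_device("core")

    @kernel
    def run(self):
        self.core.reset()
        produce()
        # Receive the message by name; the declared type must match the sender's.
        result = subkernel_recv("result", TInt32)
        subkernel_await(produce)
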
@@ -47,15 +47,8 @@ class SpecializedFunction:
         return hash((self.instance_type, self.host_function))
 
 
-class SubkernelMessageType:
-    def __init__(self, name, value_type):
-        self.name = name
-        self.value_type = value_type
-        self.send_loc = None
-        self.recv_loc = None
-
 class EmbeddingMap:
-    def __init__(self, old_embedding_map=None):
+    def __init__(self):
         self.object_current_key = 0
         self.object_forward_map = {}
         self.object_reverse_map = {}
@@ -72,22 +65,6 @@ class EmbeddingMap:
         self.str_forward_map = {}
         self.str_reverse_map = {}
 
-        # mapping `name` to object ID
-        self.subkernel_message_map = {}
-
-        # subkernels: dict of ID: function, just like object_forward_map
-        # allow the embedding map to be aware of subkernels from other kernels
-        if not old_embedding_map is None:
-            for key, obj_ref in old_embedding_map.subkernels().items():
-                self.object_forward_map[key] = obj_ref
-                obj_id = id(obj_ref)
-                self.object_reverse_map[obj_id] = key
-            for msg_id, msg_type in old_embedding_map.subkernel_messages().items():
-                self.object_forward_map[msg_id] = msg_type
-                obj_id = id(msg_type)
-                self.subkernel_message_map[msg_type.name] = msg_id
-                self.object_reverse_map[obj_id] = msg_id
-
         self.preallocate_runtime_exception_names(["RuntimeError",
                                                   "RTIOUnderflow",
                                                   "RTIOOverflow",
@@ -188,11 +165,6 @@ class EmbeddingMap:
             return self.object_reverse_map[obj_id]
 
         self.object_current_key += 1
-        while self.object_forward_map.get(self.object_current_key):
-            # make sure there's no collisions with previously inserted subkernels
-            # their identifiers must be consistent across all kernels/subkernels
-            self.object_current_key += 1
-
         self.object_forward_map[self.object_current_key] = obj_ref
         self.object_reverse_map[obj_id] = self.object_current_key
         return self.object_current_key
@@ -205,7 +177,7 @@ class EmbeddingMap:
             obj_ref = self.object_forward_map[obj_id]
             if isinstance(obj_ref, (pytypes.FunctionType, pytypes.MethodType,
                                     pytypes.BuiltinFunctionType, pytypes.ModuleType,
-                                    SpecializedFunction, SubkernelMessageType)):
+                                    SpecializedFunction)):
                 continue
             elif isinstance(obj_ref, type):
                 _, obj_typ = self.type_map[obj_ref]
@@ -221,35 +193,6 @@ class EmbeddingMap:
                 subkernels[k] = v
         return subkernels
 
-    def store_subkernel_message(self, name, value_type, function_type, function_loc):
-        if name in self.subkernel_message_map:
-            msg_id = self.subkernel_message_map[name]
-        else:
-            msg_id = self.store_object(SubkernelMessageType(name, value_type))
-            self.subkernel_message_map[name] = msg_id
-        subkernel_msg = self.retrieve_object(msg_id)
-        if function_type == "send":
-            subkernel_msg.send_loc = function_loc
-        elif function_type == "recv":
-            subkernel_msg.recv_loc = function_loc
-        else:
-            assert False
-        return msg_id, subkernel_msg
-
-    def subkernel_messages(self):
-        messages = {}
-        for msg_id in self.subkernel_message_map.values():
-            messages[msg_id] = self.retrieve_object(msg_id)
-        return messages
-
-    def subkernel_messages_unpaired(self):
-        unpaired = []
-        for msg_id in self.subkernel_message_map.values():
-            msg_obj = self.retrieve_object(msg_id)
-            if msg_obj.send_loc is None or msg_obj.recv_loc is None:
-                unpaired.append(msg_obj)
-        return unpaired
-
     def has_rpc(self):
         return any(filter(
             lambda x: (inspect.isfunction(x) or inspect.ismethod(x)) and \
@@ -257,6 +200,10 @@ class EmbeddingMap:
             self.object_forward_map.values()
         ))
 
+    def has_rpc_or_subkernel(self):
+        return any(filter(lambda x: inspect.isfunction(x) or inspect.ismethod(x),
+                          self.object_forward_map.values()))
+
 
 class ASTSynthesizer:
     def __init__(self, embedding_map, value_map, quote_function=None, expanded_from=None):
@@ -847,7 +794,7 @@ class TypedtreeHasher(algorithm.Visitor):
         return hash(tuple(freeze(getattr(node, field_name)) for field_name in fields))
 
 class Stitcher:
-    def __init__(self, core, dmgr, engine=None, print_as_rpc=True, destination=0, subkernel_arg_types=[], old_embedding_map=None):
+    def __init__(self, core, dmgr, engine=None, print_as_rpc=True, destination=0, subkernel_arg_types=[]):
         self.core = core
         self.dmgr = dmgr
         if engine is None:
@@ -869,7 +816,7 @@ class Stitcher:
 
         self.functions = {}
 
-        self.embedding_map = EmbeddingMap(old_embedding_map)
+        self.embedding_map = EmbeddingMap()
         self.value_map = defaultdict(lambda: [])
         self.definitely_changed = False
 
@@ -59,6 +59,4 @@ def globals():
         # ARTIQ subkernel utility functions
         "subkernel_await": builtins.fn_subkernel_await(),
         "subkernel_preload": builtins.fn_subkernel_preload(),
-        "subkernel_send": builtins.fn_subkernel_send(),
-        "subkernel_recv": builtins.fn_subkernel_recv(),
     }
@@ -2537,7 +2537,7 @@ class ARTIQIRGenerator(algorithm.Visitor):
             timeout = self.visit(node.args[1])
         elif len(node.args) == 1 and len(node.keywords) == 0:
             fn = node.args[0].type
-            timeout = ir.Constant(-1, builtins.TInt64())
+            timeout = ir.Constant(10_000, builtins.TInt64())
         else:
             assert False
         if types.is_method(fn):
@@ -2557,44 +2557,7 @@ class ARTIQIRGenerator(algorithm.Visitor):
             if types.is_method(fn):
                 fn = types.get_method_function(fn)
             sid = ir.Constant(fn.sid, builtins.TInt32())
-            dest = ir.Constant(fn.destination, builtins.TInt32())
-            return self.append(ir.Builtin("subkernel_preload", [sid, dest], builtins.TNone()))
-        elif types.is_builtin(typ, "subkernel_send"):
-            if len(node.args) == 3 and len(node.keywords) == 0:
-                dest = self.visit(node.args[0])
-                name = node.args[1].s
-                value = self.visit(node.args[2])
-            else:
-                assert False
-            msg_id, msg = self.embedding_map.store_subkernel_message(name, value.type, "send", node.loc)
-            msg_id = ir.Constant(msg_id, builtins.TInt32())
-            if value.type != msg.value_type:
-                diag = diagnostic.Diagnostic("error",
-                    "type mismatch for subkernel message '{name}', receiver expects {recv} while sending {send}",
-                    {"name": name, "recv": msg.value_type, "send": value.type},
-                    node.loc)
-                self.engine.process(diag)
-            return self.append(ir.Builtin("subkernel_send", [msg_id, dest, value], builtins.TNone()))
-        elif types.is_builtin(typ, "subkernel_recv"):
-            if len(node.args) == 2 and len(node.keywords) == 0:
-                name = node.args[0].s
-                vartype = node.args[1].value
-                timeout = ir.Constant(-1, builtins.TInt64())
-            elif len(node.args) == 3 and len(node.keywords) == 0:
-                name = node.args[0].s
-                vartype = node.args[1].value
-                timeout = self.visit(node.args[2])
-            else:
-                assert False
-            msg_id, msg = self.embedding_map.store_subkernel_message(name, vartype, "recv", node.loc)
-            msg_id = ir.Constant(msg_id, builtins.TInt32())
-            if vartype != msg.value_type:
-                diag = diagnostic.Diagnostic("error",
-                    "type mismatch for subkernel message '{name}', receiver expects {recv} while sending {send}",
-                    {"name": name, "recv": vartype, "send": msg.value_type},
-                    node.loc)
-                self.engine.process(diag)
-            return self.append(ir.Builtin("subkernel_recv", [msg_id, timeout], vartype))
+            return self.append(ir.Builtin("subkernel_preload", [sid], builtins.TNone()))
         elif types.is_exn_constructor(typ):
             return self.alloc_exn(node.type, *[self.visit(arg_node) for arg_node in node.args])
         elif types.is_constructor(typ):
@@ -1343,57 +1343,6 @@ class Inferencer(algorithm.Visitor):
                             node.loc, None)
             else:
                 diagnose(valid_forms())
-        elif types.is_builtin(typ, "subkernel_send"):
-            valid_forms = lambda: [
-                valid_form("subkernel_send(dest: numpy.int?, name: str, value: V) -> None"),
-            ]
-            self._unify(node.type, builtins.TNone(),
-                        node.loc, None)
-            if len(node.args) == 3:
-                arg0 = node.args[0]
-                if types.is_var(arg0.type):
-                    pass  # undetermined yet
-                else:
-                    if builtins.is_int(arg0.type):
-                        self._unify(arg0.type, builtins.TInt8(),
-                                    arg0.loc, None)
-                    else:
-                        diagnose(valid_forms())
-                arg1 = node.args[1]
-                self._unify(arg1.type, builtins.TStr(),
-                            arg1.loc, None)
-            else:
-                diagnose(valid_forms())
-        elif types.is_builtin(typ, "subkernel_recv"):
-            valid_forms = lambda: [
-                valid_form("subkernel_recv(name: str, value_type: type) -> value_type"),
-                valid_form("subkernel_recv(name: str, value_type: type, timeout: numpy.int64) -> value_type"),
-            ]
-            if 2 <= len(node.args) <= 3:
-                arg0 = node.args[0]
-                if types.is_var(arg0.type):
-                    pass
-                else:
-                    self._unify(arg0.type, builtins.TStr(),
-                                arg0.loc, None)
-                arg1 = node.args[1]
-                if types.is_var(arg1.type):
-                    pass
-                else:
-                    self._unify(node.type, arg1.value,
-                                node.loc, None)
-                if len(node.args) == 3:
-                    arg2 = node.args[2]
-                    if types.is_var(arg2.type):
-                        pass
-                    elif builtins.is_int(arg2.type):
-                        # promote to TInt64
-                        self._unify(arg2.type, builtins.TInt64(),
-                                    arg2.loc, None)
-                    else:
-                        diagnose(valid_forms())
-            else:
-                diagnose(valid_forms())
         else:
             assert False
 
@@ -399,9 +399,9 @@ class LLVMIRGenerator:
             llty = ll.FunctionType(lli32, [llptr])
 
         elif name == "subkernel_send_message":
-            llty = ll.FunctionType(llvoid, [lli32, lli1, lli8, lli8, llsliceptr, llptrptr])
+            llty = ll.FunctionType(llvoid, [lli32, lli8, llsliceptr, llptrptr])
         elif name == "subkernel_load_run":
-            llty = ll.FunctionType(llvoid, [lli32, lli8, lli1])
+            llty = ll.FunctionType(llvoid, [lli32, lli1])
         elif name == "subkernel_await_finish":
             llty = ll.FunctionType(llvoid, [lli32, lli64])
         elif name == "subkernel_await_message":
@@ -1417,23 +1417,8 @@ class LLVMIRGenerator:
             return self._build_rpc_recv(insn.type, llstackptr)
         elif insn.op == "subkernel_preload":
             llsid = self.map(insn.operands[0])
-            lldest = ll.Constant(lli8, insn.operands[1].value)
-            return self.llbuilder.call(self.llbuiltin("subkernel_load_run"), [llsid, lldest, ll.Constant(lli1, 0)],
+            return self.llbuilder.call(self.llbuiltin("subkernel_load_run"), [llsid, ll.Constant(lli1, 0)],
                                        name="subkernel.preload")
-        elif insn.op == "subkernel_send":
-            llmsgid = self.map(insn.operands[0])
-            lldest = self.map(insn.operands[1])
-            return self._build_subkernel_message(llmsgid, lldest, [insn.operands[2]])
-        elif insn.op == "subkernel_recv":
-            llmsgid = self.map(insn.operands[0])
-            lltimeout = self.map(insn.operands[1])
-            lltagptr = self._build_subkernel_tags([insn.type])
-            self.llbuilder.call(self.llbuiltin("subkernel_await_message"),
-                                [llmsgid, lltimeout, lltagptr, ll.Constant(lli8, 1), ll.Constant(lli8, 1)],
-                                name="subkernel.await.message")
-            llstackptr = self.llbuilder.call(self.llbuiltin("llvm.stacksave"), [],
-                                             name="subkernel.arg.stack")
-            return self._build_rpc_recv(insn.type, llstackptr)
         else:
             assert False
 
@@ -1442,7 +1427,7 @@ class LLVMIRGenerator:
         llmax = self.map(insn.operands[1])
         lltagptr = self._build_subkernel_tags(insn.arg_types)
         return self.llbuilder.call(self.llbuiltin("subkernel_await_message"),
-                                   [ll.Constant(lli32, -1), ll.Constant(lli64, 10_000), lltagptr, llmin, llmax],
+                                   [ll.Constant(lli32, 0), ll.Constant(lli64, 10_000), lltagptr, llmin, llmax],
                                    name="subkernel.await.args")
 
     def process_Closure(self, insn):
@@ -1594,8 +1579,11 @@ class LLVMIRGenerator:
             self.llbuilder.branch(llnormalblock)
         return llret
 
-    def _build_arg_tag(self, args, call_type):
+    def _build_rpc(self, fun_loc, fun_type, args, llnormalblock, llunwindblock):
+        llservice = ll.Constant(lli32, fun_type.service)
+
         tag = b""
 
         for arg in args:
             def arg_error_handler(typ):
                 printer = types.TypePrinter()
@@ -1604,18 +1592,12 @@ class LLVMIRGenerator:
                     {"type": printer.name(typ)},
                     arg.loc)
                 diag = diagnostic.Diagnostic("error",
-                    "type {type} is not supported in {call_type} calls",
-                    {"type": printer.name(arg.type), "call_type": call_type},
+                    "type {type} is not supported in remote procedure calls",
+                    {"type": printer.name(arg.type)},
                     arg.loc, notes=[note])
                 self.engine.process(diag)
             tag += ir.rpc_tag(arg.type, arg_error_handler)
             tag += b":"
-        return tag
-
-    def _build_rpc(self, fun_loc, fun_type, args, llnormalblock, llunwindblock):
-        llservice = ll.Constant(lli32, fun_type.service)
-
-        tag = self._build_arg_tag(args, call_type="remote procedure")
 
         def ret_error_handler(typ):
             printer = types.TypePrinter()
@@ -1678,48 +1660,59 @@
 
     def _build_subkernel_call(self, fun_loc, fun_type, args):
         llsid = ll.Constant(lli32, fun_type.sid)
-        lldest = ll.Constant(lli8, fun_type.destination)
+        tag = b""
+
+        for arg in args:
+            def arg_error_handler(typ):
+                printer = types.TypePrinter()
+                note = diagnostic.Diagnostic("note",
+                    "value of type {type}",
+                    {"type": printer.name(typ)},
+                    arg.loc)
+                diag = diagnostic.Diagnostic("error",
+                    "type {type} is not supported in subkernel calls",
+                    {"type": printer.name(arg.type)},
+                    arg.loc, notes=[note])
+                self.engine.process(diag)
+            tag += ir.rpc_tag(arg.type, arg_error_handler)
+            tag += b":"
+
         # run the kernel first
-        self.llbuilder.call(self.llbuiltin("subkernel_load_run"), [llsid, lldest, ll.Constant(lli1, 1)])
+        self.llbuilder.call(self.llbuiltin("subkernel_load_run"), [llsid, ll.Constant(lli1, 1)])
 
-        if args:
-            # only send args if there's anything to send, 'self' is excluded
-            self._build_subkernel_message(llsid, lldest, args)
-
-        return llsid
-
-    def _build_subkernel_message(self, llid, lldest, args):
-        # args (or messages) are sent in the same vein as RPC
-        tag = self._build_arg_tag(args, call_type="subkernel")
-
+        # arg sent in the same vein as RPC
         llstackptr = self.llbuilder.call(self.llbuiltin("llvm.stacksave"), [],
                                          name="subkernel.stack")
 
         lltag = self.llconst_of_const(ir.Constant(tag, builtins.TStr()))
         lltagptr = self.llbuilder.alloca(lltag.type)
         self.llbuilder.store(lltag, lltagptr)
 
-        llargs = self.llbuilder.alloca(llptr, ll.Constant(lli32, len(args)),
-                                       name="subkernel.args")
-        for index, arg in enumerate(args):
-            if builtins.is_none(arg.type):
-                llargslot = self.llbuilder.alloca(llunit,
-                                                  name="subkernel.arg{}".format(index))
-            else:
-                llarg = self.map(arg)
-                llargslot = self.llbuilder.alloca(llarg.type,
-                                                  name="subkernel.arg{}".format(index))
-                self.llbuilder.store(llarg, llargslot)
-            llargslot = self.llbuilder.bitcast(llargslot, llptr)
-
-            llargptr = self.llbuilder.gep(llargs, [ll.Constant(lli32, index)])
-            self.llbuilder.store(llargslot, llargptr)
-
-        llargcount = ll.Constant(lli8, len(args))
-
-        llisreturn = ll.Constant(lli1, False)
-        self.llbuilder.call(self.llbuiltin("subkernel_send_message"),
-                            [llid, llisreturn, lldest, llargcount, lltagptr, llargs])
-        return self.llbuilder.call(self.llbuiltin("llvm.stackrestore"), [llstackptr])
+        if args:
+            # only send args if there's anything to send, 'self' is excluded
+            llargs = self.llbuilder.alloca(llptr, ll.Constant(lli32, len(args)),
+                                           name="subkernel.args")
+            for index, arg in enumerate(args):
+                if builtins.is_none(arg.type):
+                    llargslot = self.llbuilder.alloca(llunit,
+                                                      name="subkernel.arg{}".format(index))
+                else:
+                    llarg = self.map(arg)
+                    llargslot = self.llbuilder.alloca(llarg.type,
+                                                      name="subkernel.arg{}".format(index))
+                    self.llbuilder.store(llarg, llargslot)
+                llargslot = self.llbuilder.bitcast(llargslot, llptr)
+
+                llargptr = self.llbuilder.gep(llargs, [ll.Constant(lli32, index)])
+                self.llbuilder.store(llargslot, llargptr)
+
+            llargcount = ll.Constant(lli8, len(args))
+
+            self.llbuilder.call(self.llbuiltin("subkernel_send_message"),
+                                [llsid, llargcount, lltagptr, llargs])
+            self.llbuilder.call(self.llbuiltin("llvm.stackrestore"), [llstackptr])
+
+        return llsid
 
     def _build_subkernel_return(self, insn):
         # builds a remote return.
@@ -1753,12 +1746,10 @@
         llretslot = self.llbuilder.bitcast(llretslot, llptr)
         self.llbuilder.store(llretslot, llrets)
 
-        llsid = ll.Constant(lli32, 0) # return goes back to the caller, sid is ignored
+        llsid = ll.Constant(lli32, 0) # return goes back to master, sid is ignored
         lltagcount = ll.Constant(lli8, 1) # only one thing is returned
-        llisreturn = ll.Constant(lli1, True) # it's a return, so destination is ignored
-        lldest = ll.Constant(lli8, 0)
         self.llbuilder.call(self.llbuiltin("subkernel_send_message"),
-                            [llsid, llisreturn, lldest, lltagcount, lltagptr, llrets])
+                            [llsid, lltagcount, lltagptr, llrets])
 
     def process_Call(self, insn):
         functiontyp = insn.target_function().type
@@ -999,7 +999,7 @@ class AD9910:
         """
         if not self.cpld.sync_div:
             raise ValueError("parent cpld does not drive SYNC")
-        search_span = 13
+        search_span = 31
        # FIXME https://github.com/sinara-hw/Urukul/issues/16
         # should both be 2-4 once kasli sync_in jitter is identified
         min_window = 0
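``search_span`` above bounds the SYNC_IN delay scan performed by ``tune_sync_delay``. A hedged sketch of the calling convention; device names are illustrative and the returned pair follows the driver's documented (delay, window) convention:

from artiq.experiment import *


class TuneSync(EnvExperiment):
    def build(self):
        self.setattr_device("core")
        self.setattr_device("urukul0_cpld")
        self.setattr_device("urukul0_ch0")

    @kernel
    def run(self):
        self.core.reset()
        self.urukul0_cpld.init()
        self.urukul0_ch0.init()
        # Scans SYNC_IN delay taps and returns the chosen delay together with
        # the size of the validation window that was found.
        delay, window = self.urukul0_ch0.tune_sync_delay()
        print(delay, window)
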
@@ -2,22 +2,15 @@ from operator import itemgetter
 from collections import namedtuple
 from itertools import count
 from contextlib import contextmanager
-from sipyco import keepalive
-import asyncio
 from enum import Enum
 import struct
 import logging
 import socket
-import math
 
 
 logger = logging.getLogger(__name__)
 
 
-DEFAULT_REF_PERIOD = 1e-9
-ANALYZER_MAGIC = b"ARTIQ Analyzer Proxy\n"
-
-
 class MessageType(Enum):
     output = 0b00
     input = 0b01
@@ -41,13 +34,6 @@ class ExceptionType(Enum):
     i_overflow = 0b100001
 
 
-class WaveformType(Enum):
-    ANALOG = 0
-    BIT = 1
-    VECTOR = 2
-    LOG = 3
-
-
 def get_analyzer_dump(host, port=1382):
     sock = socket.create_connection((host, port))
     try:
@@ -118,8 +104,6 @@ def decode_dump(data):
     (sent_bytes, total_byte_count,
      error_occurred, log_channel, dds_onehot_sel) = parts
 
-    logger.debug("analyzer dump has length %d", sent_bytes)
-
     expected_len = sent_bytes + 15
     if expected_len != len(data):
         raise ValueError("analyzer dump has incorrect length "
@@ -131,83 +115,15 @@ def decode_dump(data):
     if total_byte_count > sent_bytes:
         logger.info("analyzer ring buffer has wrapped %d times",
                     total_byte_count//sent_bytes)
-    if sent_bytes == 0:
-        logger.warning("analyzer dump is empty")
-
     position = 15
     messages = []
     for _ in range(sent_bytes//32):
         messages.append(decode_message(data[position:position+32]))
         position += 32
 
-    if len(messages) == 1 and isinstance(messages[0], StoppedMessage):
-        logger.warning("analyzer dump is empty aside from stop message")
-
     return DecodedDump(log_channel, bool(dds_onehot_sel), messages)
 
 
-# simplified from sipyco broadcast Receiver
-class AnalyzerProxyReceiver:
-    def __init__(self, receive_cb, disconnect_cb=None):
-        self.receive_cb = receive_cb
-        self.disconnect_cb = disconnect_cb
-
-    async def connect(self, host, port):
-        self.reader, self.writer = \
-            await keepalive.async_open_connection(host, port)
-        try:
-            line = await self.reader.readline()
-            assert line == ANALYZER_MAGIC
-            self.receive_task = asyncio.create_task(self._receive_cr())
-        except:
-            self.writer.close()
-            del self.reader
-            del self.writer
-            raise
-
-    async def close(self):
-        self.disconnect_cb = None
-        try:
-            self.receive_task.cancel()
-            try:
-                await self.receive_task
-            except asyncio.CancelledError:
-                pass
-        finally:
-            self.writer.close()
-            del self.reader
-            del self.writer
-
-    async def _receive_cr(self):
-        try:
-            while True:
-                endian_byte = await self.reader.read(1)
-                if endian_byte == b"E":
-                    endian = '>'
-                elif endian_byte == b"e":
-                    endian = '<'
-                elif endian_byte == b"":
-                    # EOF reached, connection lost
-                    return
-                else:
-                    raise ValueError
-                payload_length_word = await self.reader.readexactly(4)
-                payload_length = struct.unpack(endian + "I", payload_length_word)[0]
-                if payload_length > 10 * 512 * 1024:
-                    # 10x buffer size of firmware
-                    raise ValueError
-
-                # The remaining header length is 11 bytes.
-                remaining_data = await self.reader.readexactly(payload_length + 11)
-                data = endian_byte + payload_length_word + remaining_data
-                self.receive_cb(data)
-        except Exception:
-            logger.error("analyzer receiver connection terminating with exception", exc_info=True)
-        finally:
-            if self.disconnect_cb is not None:
-                self.disconnect_cb()
-
-
 def vcd_codes():
     codechars = [chr(i) for i in range(33, 127)]
     for n in count():
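Both versions above expose the same offline entry points around ``decode_dump``. A brief sketch of reading an RTIO analyzer dump directly from the core device (the address is illustrative):

from artiq.coredevice.comm_analyzer import get_analyzer_dump, decode_dump

# Fetch the RTIO analyzer buffer from the core device and decode it.
dump = get_analyzer_dump("192.168.1.75")
decoded = decode_dump(dump)
for message in decoded.messages:
    print(message)
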
@@ -234,129 +150,38 @@ class VCDChannel:
         integer_cast = struct.unpack(">Q", struct.pack(">d", x))[0]
         self.set_value("{:064b}".format(integer_cast))
 
-    def set_log(self, log_message):
-        value = ""
-        for c in log_message:
-            value += "{:08b}".format(ord(c))
-        self.set_value(value)
-
 
 class VCDManager:
     def __init__(self, fileobj):
         self.out = fileobj
         self.codes = vcd_codes()
         self.current_time = None
-        self.start_time = 0
 
     def set_timescale_ps(self, timescale):
         self.out.write("$timescale {}ps $end\n".format(round(timescale)))
 
-    def get_channel(self, name, width, ty, precision=0, unit=""):
+    def get_channel(self, name, width):
         code = next(self.codes)
         self.out.write("$var wire {width} {code} {name} $end\n"
                        .format(name=name, code=code, width=width))
         return VCDChannel(self.out, code)
 
     @contextmanager
-    def scope(self, scope, name):
-        self.out.write("$scope module {}/{} $end\n".format(scope, name))
+    def scope(self, name):
+        self.out.write("$scope module {} $end\n".format(name))
         yield
         self.out.write("$upscope $end\n")
 
     def set_time(self, time):
-        time -= self.start_time
         if time != self.current_time:
            self.out.write("#{}\n".format(time))
             self.current_time = time
 
-    def set_start_time(self, time):
-        self.start_time = time
-
-    def set_end_time(self, time):
-        pass
-
-
-class WaveformManager:
-    def __init__(self):
-        self.current_time = 0
-        self.start_time = 0
-        self.end_time = 0
-        self.channels = list()
-        self.current_scope = ""
-        self.trace = {"timescale": 1, "stopped_x": None, "logs": dict(), "data": dict()}
-
-    def set_timescale_ps(self, timescale):
-        self.trace["timescale"] = int(timescale)
-
-    def get_channel(self, name, width, ty, precision=0, unit=""):
-        if ty == WaveformType.LOG:
-            self.trace["logs"][self.current_scope + name] = (ty, width, precision, unit)
-        data = self.trace["data"][self.current_scope + name] = list()
-        channel = WaveformChannel(data, self.current_time)
-        self.channels.append(channel)
-        return channel
-
-    @contextmanager
-    def scope(self, scope, name):
-        old_scope = self.current_scope
-        self.current_scope = scope + "/"
-        yield
-        self.current_scope = old_scope
-
-    def set_time(self, time):
-        time -= self.start_time
-        for channel in self.channels:
-            channel.set_time(time)
-
-    def set_start_time(self, time):
-        self.start_time = time
-        if self.trace["stopped_x"] is not None:
-            self.trace["stopped_x"] = self.end_time - self.start_time
-
-    def set_end_time(self, time):
-        self.end_time = time
-        self.trace["stopped_x"] = self.end_time - self.start_time
-
-
-class WaveformChannel:
-    def __init__(self, data, current_time):
-        self.data = data
-        self.current_time = current_time
-
-    def set_value(self, value):
-        self.data.append((self.current_time, value))
-
-    def set_value_double(self, x):
-        self.data.append((self.current_time, x))
-
-    def set_time(self, time):
-        self.current_time = time
-
-    def set_log(self, log_message):
-        self.data.append((self.current_time, log_message))
-
-
-class ChannelSignatureManager:
-    def __init__(self):
-        self.current_scope = ""
-        self.channels = dict()
-
-    def get_channel(self, name, width, ty, precision=0, unit=""):
-        self.channels[self.current_scope + name] = (ty, width, precision, unit)
-        return None
-
-    @contextmanager
-    def scope(self, scope, name):
-        old_scope = self.current_scope
-        self.current_scope = scope + "/"
-        yield
-        self.current_scope = old_scope
-
 
 class TTLHandler:
-    def __init__(self, manager, name):
+    def __init__(self, vcd_manager, name):
         self.name = name
-        self.channel_value = manager.get_channel("ttl/" + name, 1, ty=WaveformType.BIT)
+        self.channel_value = vcd_manager.get_channel("ttl/" + name, 1)
         self.last_value = "X"
         self.oe = True
 
@@ -381,12 +206,11 @@ class TTLHandler:
 
 
 class TTLClockGenHandler:
-    def __init__(self, manager, name, ref_period):
+    def __init__(self, vcd_manager, name, ref_period):
         self.name = name
         self.ref_period = ref_period
-        precision = max(0, math.ceil(math.log10(2**24 * ref_period) + 6))
-        self.channel_frequency = manager.get_channel(
-            "ttl_clkgen/" + name, 64, ty=WaveformType.ANALOG, precision=precision, unit="MHz")
+        self.channel_frequency = vcd_manager.get_channel(
+            "ttl_clkgen/" + name, 64)
 
     def process_message(self, message):
         if isinstance(message, OutputMessage):
@@ -397,8 +221,8 @@ class TTLClockGenHandler:
 
 
 class DDSHandler:
-    def __init__(self, manager, onehot_sel, sysclk):
-        self.manager = manager
+    def __init__(self, vcd_manager, onehot_sel, sysclk):
+        self.vcd_manager = vcd_manager
         self.onehot_sel = onehot_sel
         self.sysclk = sysclk
 
@@ -407,18 +231,11 @@ class DDSHandler:
 
     def add_dds_channel(self, name, dds_channel_nr):
         dds_channel = dict()
-        frequency_precision = max(0, math.ceil(math.log10(2**32 / self.sysclk) + 6))
-        phase_precision = max(0, math.ceil(math.log10(2**16)))
-        with self.manager.scope("dds", name):
+        with self.vcd_manager.scope("dds/{}".format(name)):
             dds_channel["vcd_frequency"] = \
-                self.manager.get_channel(name + "/frequency", 64,
-                                         ty=WaveformType.ANALOG,
-                                         precision=frequency_precision,
-                                         unit="MHz")
+                self.vcd_manager.get_channel(name + "/frequency", 64)
             dds_channel["vcd_phase"] = \
-                self.manager.get_channel(name + "/phase", 64,
-                                         ty=WaveformType.ANALOG,
-                                         precision=phase_precision)
+                self.vcd_manager.get_channel(name + "/phase", 64)
         dds_channel["ftw"] = [None, None]
         dds_channel["pow"] = None
         self.dds_channels[dds_channel_nr] = dds_channel
@@ -468,10 +285,10 @@ class DDSHandler:
 
 
 class WishboneHandler:
-    def __init__(self, manager, name, read_bit):
+    def __init__(self, vcd_manager, name, read_bit):
         self._reads = []
         self._read_bit = read_bit
-        self.stb = manager.get_channel(name + "/stb", 1, ty=WaveformType.BIT)
+        self.stb = vcd_manager.get_channel("{}/{}".format(name, "stb"), 1)
 
     def process_message(self, message):
         self.stb.set_value("1")
@@ -501,17 +318,16 @@ class WishboneHandler:
 
 
 class SPIMasterHandler(WishboneHandler):
-    def __init__(self, manager, name):
+    def __init__(self, vcd_manager, name):
         self.channels = {}
-        self.scope = "spi"
-        with manager.scope("spi", name):
-            super().__init__(manager, name, read_bit=0b100)
+        with vcd_manager.scope("spi/{}".format(name)):
+            super().__init__(vcd_manager, name, read_bit=0b100)
             for reg_name, reg_width in [
                     ("config", 32), ("chip_select", 16),
                     ("write_length", 8), ("read_length", 8),
                     ("write", 32), ("read", 32)]:
-                self.channels[reg_name] = manager.get_channel(
-                    "{}/{}".format(name, reg_name), reg_width, ty=WaveformType.VECTOR)
+                self.channels[reg_name] = vcd_manager.get_channel(
+                    "{}/{}".format(name, reg_name), reg_width)
 
     def process_write(self, address, data):
         if address == 0:
@@ -536,12 +352,11 @@ class SPIMasterHandler(WishboneHandler):
 
 
 class SPIMaster2Handler(WishboneHandler):
-    def __init__(self, manager, name):
+    def __init__(self, vcd_manager, name):
         self._reads = []
         self.channels = {}
-        self.scope = "spi2"
-        with manager.scope("spi2", name):
-            self.stb = manager.get_channel(name + "/stb", 1, ty=WaveformType.BIT)
+        with vcd_manager.scope("spi2/{}".format(name)):
+            self.stb = vcd_manager.get_channel("{}/{}".format(name, "stb"), 1)
             for reg_name, reg_width in [
                     ("flags", 8),
                     ("length", 5),
@ -549,8 +364,8 @@ class SPIMaster2Handler(WishboneHandler):
|
||||||
("chip_select", 8),
|
("chip_select", 8),
|
||||||
("write", 32),
|
("write", 32),
|
||||||
("read", 32)]:
|
("read", 32)]:
|
||||||
self.channels[reg_name] = manager.get_channel(
|
self.channels[reg_name] = vcd_manager.get_channel(
|
||||||
"{}/{}".format(name, reg_name), reg_width, ty=WaveformType.VECTOR)
|
"{}/{}".format(name, reg_name), reg_width)
|
||||||
|
|
||||||
def process_message(self, message):
|
def process_message(self, message):
|
||||||
self.stb.set_value("1")
|
self.stb.set_value("1")
|
||||||
|
@ -598,12 +413,11 @@ def _extract_log_chars(data):
|
||||||
|
|
||||||
|
|
||||||
class LogHandler:
|
class LogHandler:
|
||||||
def __init__(self, manager, log_channels):
|
def __init__(self, vcd_manager, vcd_log_channels):
|
||||||
self.channels = dict()
|
self.vcd_channels = dict()
|
||||||
for name, maxlength in log_channels.items():
|
for name, maxlength in vcd_log_channels.items():
|
||||||
self.channels[name] = manager.get_channel("logs/" + name,
|
self.vcd_channels[name] = vcd_manager.get_channel("log/" + name,
|
||||||
maxlength * 8,
|
maxlength*8)
|
||||||
ty=WaveformType.LOG)
|
|
||||||
self.current_entry = ""
|
self.current_entry = ""
|
||||||
|
|
||||||
def process_message(self, message):
|
def process_message(self, message):
|
||||||
|
@ -611,12 +425,15 @@ class LogHandler:
|
||||||
self.current_entry += _extract_log_chars(message.data)
|
self.current_entry += _extract_log_chars(message.data)
|
||||||
if len(self.current_entry) > 1 and self.current_entry[-1] == "\x1D":
|
if len(self.current_entry) > 1 and self.current_entry[-1] == "\x1D":
|
||||||
channel_name, log_message = self.current_entry[:-1].split("\x1E", maxsplit=1)
|
channel_name, log_message = self.current_entry[:-1].split("\x1E", maxsplit=1)
|
||||||
self.channels[channel_name].set_log(log_message)
|
vcd_value = ""
|
||||||
|
for c in log_message:
|
||||||
|
vcd_value += "{:08b}".format(ord(c))
|
||||||
|
self.vcd_channels[channel_name].set_value(vcd_value)
|
||||||
self.current_entry = ""
|
self.current_entry = ""
|
||||||
|
|
||||||
|
|
||||||
def get_log_channels(log_channel, messages):
|
def get_vcd_log_channels(log_channel, messages):
|
||||||
log_channels = dict()
|
vcd_log_channels = dict()
|
||||||
log_entry = ""
|
log_entry = ""
|
||||||
for message in messages:
|
for message in messages:
|
||||||
if (isinstance(message, OutputMessage)
|
if (isinstance(message, OutputMessage)
|
||||||
|
@ -625,13 +442,13 @@ def get_log_channels(log_channel, messages):
|
||||||
if len(log_entry) > 1 and log_entry[-1] == "\x1D":
|
if len(log_entry) > 1 and log_entry[-1] == "\x1D":
|
||||||
channel_name, log_message = log_entry[:-1].split("\x1E", maxsplit=1)
|
channel_name, log_message = log_entry[:-1].split("\x1E", maxsplit=1)
|
||||||
l = len(log_message)
|
l = len(log_message)
|
||||||
if channel_name in log_channels:
|
if channel_name in vcd_log_channels:
|
||||||
if log_channels[channel_name] < l:
|
if vcd_log_channels[channel_name] < l:
|
||||||
log_channels[channel_name] = l
|
vcd_log_channels[channel_name] = l
|
||||||
else:
|
else:
|
||||||
log_channels[channel_name] = l
|
vcd_log_channels[channel_name] = l
|
||||||
log_entry = ""
|
log_entry = ""
|
||||||
return log_channels
|
return vcd_log_channels
|
||||||
|
|
||||||
|
|
||||||
def get_single_device_argument(devices, module, cls, argument):
|
def get_single_device_argument(devices, module, cls, argument):
|
||||||
|
@ -658,7 +475,7 @@ def get_dds_sysclk(devices):
|
||||||
("AD9914",), "sysclk")
|
("AD9914",), "sysclk")
|
||||||
|
|
||||||
|
|
||||||
def create_channel_handlers(manager, devices, ref_period,
|
def create_channel_handlers(vcd_manager, devices, ref_period,
|
||||||
dds_sysclk, dds_onehot_sel):
|
dds_sysclk, dds_onehot_sel):
|
||||||
channel_handlers = dict()
|
channel_handlers = dict()
|
||||||
for name, desc in sorted(devices.items(), key=itemgetter(0)):
|
for name, desc in sorted(devices.items(), key=itemgetter(0)):
|
||||||
|
@ -666,11 +483,11 @@ def create_channel_handlers(manager, devices, ref_period,
|
||||||
if (desc["module"] == "artiq.coredevice.ttl"
|
if (desc["module"] == "artiq.coredevice.ttl"
|
||||||
and desc["class"] in {"TTLOut", "TTLInOut"}):
|
and desc["class"] in {"TTLOut", "TTLInOut"}):
|
||||||
channel = desc["arguments"]["channel"]
|
channel = desc["arguments"]["channel"]
|
||||||
channel_handlers[channel] = TTLHandler(manager, name)
|
channel_handlers[channel] = TTLHandler(vcd_manager, name)
|
||||||
if (desc["module"] == "artiq.coredevice.ttl"
|
if (desc["module"] == "artiq.coredevice.ttl"
|
||||||
and desc["class"] == "TTLClockGen"):
|
and desc["class"] == "TTLClockGen"):
|
||||||
channel = desc["arguments"]["channel"]
|
channel = desc["arguments"]["channel"]
|
||||||
channel_handlers[channel] = TTLClockGenHandler(manager, name, ref_period)
|
channel_handlers[channel] = TTLClockGenHandler(vcd_manager, name, ref_period)
|
||||||
if (desc["module"] == "artiq.coredevice.ad9914"
|
if (desc["module"] == "artiq.coredevice.ad9914"
|
||||||
and desc["class"] == "AD9914"):
|
and desc["class"] == "AD9914"):
|
||||||
dds_bus_channel = desc["arguments"]["bus_channel"]
|
dds_bus_channel = desc["arguments"]["bus_channel"]
|
||||||
|
@ -678,60 +495,37 @@ def create_channel_handlers(manager, devices, ref_period,
|
||||||
if dds_bus_channel in channel_handlers:
|
if dds_bus_channel in channel_handlers:
|
||||||
dds_handler = channel_handlers[dds_bus_channel]
|
dds_handler = channel_handlers[dds_bus_channel]
|
||||||
else:
|
else:
|
||||||
dds_handler = DDSHandler(manager, dds_onehot_sel, dds_sysclk)
|
dds_handler = DDSHandler(vcd_manager, dds_onehot_sel, dds_sysclk)
|
||||||
channel_handlers[dds_bus_channel] = dds_handler
|
channel_handlers[dds_bus_channel] = dds_handler
|
||||||
dds_handler.add_dds_channel(name, dds_channel)
|
dds_handler.add_dds_channel(name, dds_channel)
|
||||||
if (desc["module"] == "artiq.coredevice.spi2" and
|
if (desc["module"] == "artiq.coredevice.spi2" and
|
||||||
desc["class"] == "SPIMaster"):
|
desc["class"] == "SPIMaster"):
|
||||||
channel = desc["arguments"]["channel"]
|
channel = desc["arguments"]["channel"]
|
||||||
channel_handlers[channel] = SPIMaster2Handler(
|
channel_handlers[channel] = SPIMaster2Handler(
|
||||||
manager, name)
|
vcd_manager, name)
|
||||||
return channel_handlers
|
return channel_handlers
|
||||||
|
|
||||||
|
|
||||||
def get_channel_list(devices):
|
|
||||||
manager = ChannelSignatureManager()
|
|
||||||
create_channel_handlers(manager, devices, 1e-9, 3e9, False)
|
|
||||||
ref_period = get_ref_period(devices)
|
|
||||||
if ref_period is None:
|
|
||||||
ref_period = DEFAULT_REF_PERIOD
|
|
||||||
precision = max(0, math.ceil(math.log10(1 / ref_period) - 6))
|
|
||||||
manager.get_channel("rtio_slack", 64, ty=WaveformType.ANALOG, precision=precision, unit="us")
|
|
||||||
return manager.channels
|
|
||||||
|
|
||||||
|
|
||||||
def get_message_time(message):
|
def get_message_time(message):
|
||||||
return getattr(message, "timestamp", message.rtio_counter)
|
return getattr(message, "timestamp", message.rtio_counter)
|
||||||
|
|
||||||
|
|
||||||
def decoded_dump_to_vcd(fileobj, devices, dump, uniform_interval=False):
|
def decoded_dump_to_vcd(fileobj, devices, dump, uniform_interval=False):
|
||||||
vcd_manager = VCDManager(fileobj)
|
vcd_manager = VCDManager(fileobj)
|
||||||
decoded_dump_to_target(vcd_manager, devices, dump, uniform_interval)
|
|
||||||
|
|
||||||
|
|
||||||
def decoded_dump_to_waveform_data(devices, dump, uniform_interval=False):
|
|
||||||
manager = WaveformManager()
|
|
||||||
decoded_dump_to_target(manager, devices, dump, uniform_interval)
|
|
||||||
return manager.trace
|
|
||||||
|
|
||||||
|
|
||||||
def decoded_dump_to_target(manager, devices, dump, uniform_interval):
|
|
||||||
ref_period = get_ref_period(devices)
|
ref_period = get_ref_period(devices)
|
||||||
|
|
||||||
if ref_period is None:
|
if ref_period is not None:
|
||||||
|
if not uniform_interval:
|
||||||
|
vcd_manager.set_timescale_ps(ref_period*1e12)
|
||||||
|
else:
|
||||||
logger.warning("unable to determine core device ref_period")
|
logger.warning("unable to determine core device ref_period")
|
||||||
ref_period = DEFAULT_REF_PERIOD
|
ref_period = 1e-9 # guess
|
||||||
if not uniform_interval:
|
|
||||||
manager.set_timescale_ps(ref_period*1e12)
|
|
||||||
dds_sysclk = get_dds_sysclk(devices)
|
dds_sysclk = get_dds_sysclk(devices)
|
||||||
if dds_sysclk is None:
|
if dds_sysclk is None:
|
||||||
logger.warning("unable to determine DDS sysclk")
|
logger.warning("unable to determine DDS sysclk")
|
||||||
dds_sysclk = 3e9 # guess
|
dds_sysclk = 3e9 # guess
|
||||||
|
|
||||||
if isinstance(dump.messages[-1], StoppedMessage):
|
if isinstance(dump.messages[-1], StoppedMessage):
|
||||||
m = dump.messages[-1]
|
|
||||||
end_time = get_message_time(m)
|
|
||||||
manager.set_end_time(end_time)
|
|
||||||
messages = dump.messages[:-1]
|
messages = dump.messages[:-1]
|
||||||
else:
|
else:
|
||||||
logger.warning("StoppedMessage missing")
|
logger.warning("StoppedMessage missing")
|
||||||
|
@ -739,39 +533,38 @@ def decoded_dump_to_target(manager, devices, dump, uniform_interval):
|
||||||
messages = sorted(messages, key=get_message_time)
|
messages = sorted(messages, key=get_message_time)
|
||||||
|
|
||||||
channel_handlers = create_channel_handlers(
|
channel_handlers = create_channel_handlers(
|
||||||
manager, devices, ref_period,
|
vcd_manager, devices, ref_period,
|
||||||
dds_sysclk, dump.dds_onehot_sel)
|
dds_sysclk, dump.dds_onehot_sel)
|
||||||
log_channels = get_log_channels(dump.log_channel, messages)
|
vcd_log_channels = get_vcd_log_channels(dump.log_channel, messages)
|
||||||
channel_handlers[dump.log_channel] = LogHandler(
|
channel_handlers[dump.log_channel] = LogHandler(
|
||||||
manager, log_channels)
|
vcd_manager, vcd_log_channels)
|
||||||
if uniform_interval:
|
if uniform_interval:
|
||||||
# RTIO event timestamp in machine units
|
# RTIO event timestamp in machine units
|
||||||
timestamp = manager.get_channel("timestamp", 64, ty=WaveformType.VECTOR)
|
timestamp = vcd_manager.get_channel("timestamp", 64)
|
||||||
# RTIO time interval between this and the next timed event
|
# RTIO time interval between this and the next timed event
|
||||||
# in SI seconds
|
# in SI seconds
|
||||||
interval = manager.get_channel("interval", 64, ty=WaveformType.ANALOG)
|
interval = vcd_manager.get_channel("interval", 64)
|
||||||
slack = manager.get_channel("rtio_slack", 64, ty=WaveformType.ANALOG)
|
slack = vcd_manager.get_channel("rtio_slack", 64)
|
||||||
|
|
||||||
manager.set_time(0)
|
vcd_manager.set_time(0)
|
||||||
start_time = 0
|
start_time = 0
|
||||||
for m in messages:
|
for m in messages:
|
||||||
start_time = get_message_time(m)
|
start_time = get_message_time(m)
|
||||||
if start_time:
|
if start_time:
|
||||||
break
|
break
|
||||||
if not uniform_interval:
|
|
||||||
manager.set_start_time(start_time)
|
t0 = 0
|
||||||
t0 = start_time
|
|
||||||
for i, message in enumerate(messages):
|
for i, message in enumerate(messages):
|
||||||
if message.channel in channel_handlers:
|
if message.channel in channel_handlers:
|
||||||
t = get_message_time(message)
|
t = get_message_time(message) - start_time
|
||||||
if t >= 0:
|
if t >= 0:
|
||||||
if uniform_interval:
|
if uniform_interval:
|
||||||
interval.set_value_double((t - t0)*ref_period)
|
interval.set_value_double((t - t0)*ref_period)
|
||||||
manager.set_time(i)
|
vcd_manager.set_time(i)
|
||||||
timestamp.set_value("{:064b}".format(t))
|
timestamp.set_value("{:064b}".format(t))
|
||||||
t0 = t
|
t0 = t
|
||||||
else:
|
else:
|
||||||
manager.set_time(t)
|
vcd_manager.set_time(t)
|
||||||
channel_handlers[message.channel].process_message(message)
|
channel_handlers[message.channel].process_message(message)
|
||||||
if isinstance(message, OutputMessage):
|
if isinstance(message, OutputMessage):
|
||||||
slack.set_value_double(
|
slack.set_value_double(
|
||||||
|
|
|
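For reference, the master-side LogHandler above packs each decoded log string (the "\x1E"/"\x1D"-delimited entries) into a VCD vector channel by concatenating the 8-bit codes of its characters. A minimal standalone sketch of that encoding, outside of any VCD machinery:

    def log_message_to_vcd_bits(log_message: str) -> str:
        # Each character becomes its 8-bit code, concatenated MSB-first,
        # matching the "{:08b}".format(ord(c)) loop in LogHandler above.
        return "".join("{:08b}".format(ord(c)) for c in log_message)

    assert log_message_to_vcd_bits("ok") == "0110111101101011"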
@@ -94,7 +94,9 @@ class CommMonInj:
                     self.injection_status_cb(channel, override, value)
                 else:
                     raise ValueError("Unknown packet type", ty)
-        except Exception:
+        except asyncio.CancelledError:
+            raise
+        except:
             logger.error("Moninj connection terminating with exception", exc_info=True)
         finally:
             if self.disconnect_cb is not None:
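The master-side handler above re-raises asyncio.CancelledError ahead of the blanket except clause, so that task cancellation propagates instead of being swallowed and logged as a connection error. The same pattern in isolation (a sketch; read_packet and on_error are hypothetical callables, not part of the code above):

    import asyncio

    async def receive_loop(read_packet, on_error):
        try:
            while True:
                await read_packet()
        except asyncio.CancelledError:
            raise            # let cancellation propagate to whoever awaits the task
        except Exception:
            on_error()       # anything else is a real protocol or connection error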
@@ -120,15 +120,13 @@ class Core:

     def compile(self, function, args, kwargs, set_result=None,
                 attribute_writeback=True, print_as_rpc=True,
-                target=None, destination=0, subkernel_arg_types=[],
-                old_embedding_map=None):
+                target=None, destination=0, subkernel_arg_types=[]):
         try:
             engine = _DiagnosticEngine(all_errors_are_fatal=True)

             stitcher = Stitcher(engine=engine, core=self, dmgr=self.dmgr,
                                 print_as_rpc=print_as_rpc,
-                                destination=destination, subkernel_arg_types=subkernel_arg_types,
-                                old_embedding_map=old_embedding_map)
+                                destination=destination, subkernel_arg_types=subkernel_arg_types)
             stitcher.stitch_call(function, args, kwargs, set_result)
             stitcher.finalize()

@@ -167,7 +165,7 @@ class Core:
             self._run_compiled(kernel_library, embedding_map, symbolizer, demangler)
             return result

-    def compile_subkernel(self, sid, subkernel_fn, embedding_map, args, subkernel_arg_types, subkernels):
+    def compile_subkernel(self, sid, subkernel_fn, embedding_map, args, subkernel_arg_types):
         # pass self to subkernels (if applicable)
         # assuming the first argument is self
         subkernel_args = getfullargspec(subkernel_fn.artiq_embedded.function)

@@ -181,49 +179,17 @@ class Core:
             object_map, kernel_library, _, _, _ = \
                 self.compile(subkernel_fn, self_arg, {}, attribute_writeback=False,
                              print_as_rpc=False, target=target, destination=destination,
-                             subkernel_arg_types=subkernel_arg_types.get(sid, []),
-                             old_embedding_map=embedding_map)
-            if object_map.has_rpc():
-                raise ValueError("Subkernel must not use RPC")
-            return destination, kernel_library, object_map
+                             subkernel_arg_types=subkernel_arg_types.get(sid, []))
+            if object_map.has_rpc_or_subkernel():
+                raise ValueError("Subkernel must not use RPC or subkernels in other destinations")
+            return destination, kernel_library

     def compile_and_upload_subkernels(self, embedding_map, args, subkernel_arg_types):
-        subkernels = embedding_map.subkernels()
-        subkernels_compiled = []
-        while True:
-            new_subkernels = {}
-            for sid, subkernel_fn in subkernels.items():
-                if sid in subkernels_compiled:
-                    continue
-                destination, kernel_library, embedding_map = \
-                    self.compile_subkernel(sid, subkernel_fn, embedding_map,
-                                           args, subkernel_arg_types, subkernels)
-                self.comm.upload_subkernel(kernel_library, sid, destination)
-                new_subkernels.update(embedding_map.subkernels())
-                subkernels_compiled.append(sid)
-            if new_subkernels == subkernels:
-                break
-            subkernels.update(new_subkernels)
-        # check for messages without a send/recv pair
-        unpaired_messages = embedding_map.subkernel_messages_unpaired()
-        if unpaired_messages:
-            for unpaired_message in unpaired_messages:
-                engine = _DiagnosticEngine(all_errors_are_fatal=False)
-                # errors are non-fatal in order to display
-                # all unpaired message errors before raising an excption
-                if unpaired_message.send_loc is None:
-                    diag = diagnostic.Diagnostic("error",
-                        "subkernel message '{name}' only has a receiver but no sender",
-                        {"name": unpaired_message.name},
-                        unpaired_message.recv_loc)
-                else:
-                    diag = diagnostic.Diagnostic("error",
-                        "subkernel message '{name}' only has a sender but no receiver",
-                        {"name": unpaired_message.name},
-                        unpaired_message.send_loc)
-                engine.process(diag)
-            raise ValueError("Found subkernel message(s) without a full send/recv pair")
+        for sid, subkernel_fn in embedding_map.subkernels().items():
+            destination, kernel_library = \
+                self.compile_subkernel(sid, subkernel_fn, embedding_map,
+                                       args, subkernel_arg_types)
+            self.comm.upload_subkernel(kernel_library, sid, destination)

     def precompile(self, function, *args, **kwargs):
         """Precompile a kernel and return a callable that executes it on the core device

@@ -353,4 +319,6 @@ class Core:
         if self.analyzer_proxy is None:
             raise IOError("No analyzer proxy configured")
         else:
-            self.analyzer_proxy.trigger()
+            success = self.analyzer_proxy.trigger()
+            if not success:
+                raise IOError("Analyzer proxy reported failure")
@@ -49,10 +49,6 @@
                 "default": 125e6,
                 "description": "RTIO frequency"
             },
-            "enable_wrpll": {
-                "type": "boolean",
-                "default": false
-            },
             "core_addr": {
                 "type": "string",
                 "format": "ipv4",

@@ -312,7 +308,8 @@
                 "clk_div": {
                     "type": "integer",
                     "minimum": 0,
-                    "maximum": 3
+                    "maximum": 3,
+                    "default": 0
                 },
                 "pll_n": {
                     "type": "integer"
@@ -2,7 +2,7 @@ from numpy import int32, int64

 from artiq.language.core import *
 from artiq.language.types import *
-from artiq.coredevice.rtio import rtio_output, rtio_input_timestamped_data
+from artiq.coredevice.rtio import rtio_output, rtio_input_data


 class OutOfSyncException(Exception):

@@ -11,11 +11,6 @@ class OutOfSyncException(Exception):
     pass


-class GrabberTimeoutException(Exception):
-    """Raised when a timeout occurs while attempting to read Grabber RTIO input events."""
-    pass
-
-
 class Grabber:
     """Driver for the Grabber camera interface."""
     kernel_invariants = {"core", "channel_base", "sentinel"}

@@ -87,10 +82,10 @@ class Grabber:
         self.gate_roi(0)

     @kernel
-    def input_mu(self, data, timeout_mu=-1):
+    def input_mu(self, data):
         """
         Retrieves the accumulated values for one frame from the ROI engines.
-        Blocks until values are available or timeout is reached.
+        Blocks until values are available.

         The input list must be a list of integers of the same length as there
         are enabled ROI engines. This method replaces the elements of the

@@ -100,26 +95,15 @@ class Grabber:
         If the number of elements in the list does not match the number of
         ROI engines that produced output, an exception will be raised during
         this call or the next.

-        If the timeout is reached before data is available, the exception
-        GrabberTimeoutException is raised.
-
-        :param timeout_mu: Timestamp at which a timeout will occur. Set to -1
-            (default) to disable timeout.
         """
         channel = self.channel_base + 1

-        timestamp, sentinel = rtio_input_timestamped_data(timeout_mu, channel)
-        if timestamp == -1:
-            raise GrabberTimeoutException("Timeout before Grabber frame available")
+        sentinel = rtio_input_data(channel)
         if sentinel != self.sentinel:
             raise OutOfSyncException

         for i in range(len(data)):
-            timestamp, roi_output = rtio_input_timestamped_data(timeout_mu, channel)
+            roi_output = rtio_input_data(channel)
             if roi_output == self.sentinel:
                 raise OutOfSyncException
-            if timestamp == -1:
-                raise GrabberTimeoutException(
-                    "Timeout retrieving ROIs (attempting to read more ROIs than enabled?)")
             data[i] = roi_output
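On the left-hand branch, input_mu accepts an optional timeout_mu timestamp so a kernel can bound how long it waits for a frame; on master it blocks indefinitely. A rough usage sketch of the branch-side API (device names "core" and "grabber", the ROI mask, and the two-ROI count are illustrative assumptions, not taken from the diff):

    from artiq.experiment import *

    class GrabberFrame(EnvExperiment):
        def build(self):
            self.setattr_device("core")
            self.setattr_device("grabber")

        @kernel
        def run(self):
            self.core.reset()
            data = [0] * 2                          # one slot per enabled ROI engine
            self.grabber.gate_roi_pulse(0b11, 500*us)
            deadline = self.core.get_rtio_counter_mu() + self.core.seconds_to_mu(100*ms)
            # Branch-side only: pass a timeout timestamp; -1 (the default) waits forever.
            # A GrabberTimeoutException is raised if no frame arrives by the deadline.
            self.grabber.input_mu(data, deadline)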
@@ -21,7 +21,7 @@ def adc_mu_to_volt(data, gain=0, corrected_fs=True):
     :param data: 16 bit signed ADC word
     :param gain: PGIA gain setting (0: 1, ..., 3: 1000)
     :param corrected_fs: use corrected ADC FS reference.
         Should be True for Samplers' revisions after v2.1. False for v2.1 and earlier.
     :return: Voltage in Volts
     """
     if gain == 0:
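A quick sense of scale for the conversion documented above (assuming the usual +/-10 V Sampler full scale at PGIA gain 1, so one 16-bit LSB is roughly 0.3 mV; the exact value depends on the corrected_fs reference):

    from artiq.coredevice.sampler import adc_mu_to_volt

    # 16-bit signed ADC word; at gain setting 0 (PGIA gain 1) full scale is
    # about +/-10 V, so 3277 counts correspond to roughly 1 V.
    print(adc_mu_to_volt(3277, gain=0))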
@@ -36,7 +36,9 @@ class AppletsCCBDock(applets.AppletsDock):
         ccbp_group_menu.addAction(self.ccbp_group_create)
         actiongroup.addAction(self.ccbp_group_create)
         self.ccbp_group_enable = QtWidgets.QAction("Create and enable/disable applets",
                                                    self.table)
         self.ccbp_group_enable.setCheckable(True)
         self.ccbp_group_enable.triggered.connect(lambda: self.set_ccbp("enable"))
         ccbp_group_menu.addAction(self.ccbp_group_enable)
@@ -8,6 +8,7 @@ from sipyco import pyon
 from artiq.tools import scale_from_metadata, short_format, exc_to_warning
 from artiq.gui.tools import LayoutWidget, QRecursiveFilterProxyModel
 from artiq.gui.models import DictSyncTreeSepModel
+from artiq.gui.scientific_spinbox import ScientificSpinBox


 logger = logging.getLogger(__name__)

@@ -80,7 +81,8 @@ class CreateEditDialog(QtWidgets.QDialog):
             t = value.dtype if value is np.ndarray else type(value)
             if scale != 1 and np.issubdtype(t, np.number):
                 # degenerates to float type
-                value_edit_string = self.value_to_edit_string(value / scale)
+                value_edit_string = self.value_to_edit_string(
+                    np.float64(value / scale))
             self.unit_widget.setText(metadata.get('unit', ''))
             self.scale_widget.setText(str(metadata.get('scale', '')))
             self.precision_widget.setText(str(metadata.get('precision', '')))

@@ -107,13 +109,11 @@ class CreateEditDialog(QtWidgets.QDialog):
         t = value.dtype if value is np.ndarray else type(value)
         if scale != 1 and np.issubdtype(t, np.number):
             # degenerates to float type
-            value = float(value * scale)
+            value = np.float64(value * scale)
         if self.key and self.key != key:
-            asyncio.ensure_future(exc_to_warning(rename(self.key, key, value, metadata, persist,
-                                                        self.dataset_ctl)))
+            asyncio.ensure_future(exc_to_warning(rename(self.key, key, value, metadata, persist, self.dataset_ctl)))
         else:
-            asyncio.ensure_future(exc_to_warning(self.dataset_ctl.set(key, value, metadata=metadata,
-                                                                      persist=persist)))
+            asyncio.ensure_future(exc_to_warning(self.dataset_ctl.set(key, value, metadata=metadata, persist=persist)))
         self.key = key
         QtWidgets.QDialog.accept(self)

@@ -163,7 +163,7 @@ class CreateEditDialog(QtWidgets.QDialog):


 class Model(DictSyncTreeSepModel):
     def __init__(self, init):
         DictSyncTreeSepModel.__init__(self, ".",
                                       ["Dataset", "Persistent", "Value"],
                                       init)
@@ -9,9 +9,10 @@ import h5py

 from sipyco import pyon

-from artiq.gui.entries import procdesc_to_entry, EntryTreeWidget
+from artiq.gui.entries import procdesc_to_entry, ScanEntry
 from artiq.gui.fuzzy_select import FuzzySelectWidget
-from artiq.gui.tools import (LayoutWidget, log_level_to_name, get_open_file_name)
+from artiq.gui.tools import (LayoutWidget, WheelFilter,
+                             log_level_to_name, get_open_file_name)
 from artiq.tools import parse_devarg_override, unparse_devarg_override

@@ -24,23 +25,99 @@ logger = logging.getLogger(__name__)
 # 2. file:<class name>@<file name>


-class _ArgumentEditor(EntryTreeWidget):
+class _ArgumentEditor(QtWidgets.QTreeWidget):
     def __init__(self, manager, dock, expurl):
         self.manager = manager
         self.expurl = expurl

-        EntryTreeWidget.__init__(self)
+        QtWidgets.QTreeWidget.__init__(self)
+        self.setColumnCount(3)
+        self.header().setStretchLastSection(False)
+        if hasattr(self.header(), "setSectionResizeMode"):
+            set_resize_mode = self.header().setSectionResizeMode
+        else:
+            set_resize_mode = self.header().setResizeMode
+        set_resize_mode(0, QtWidgets.QHeaderView.ResizeToContents)
+        set_resize_mode(1, QtWidgets.QHeaderView.Stretch)
+        set_resize_mode(2, QtWidgets.QHeaderView.ResizeToContents)
+        self.header().setVisible(False)
+        self.setSelectionMode(self.NoSelection)
+        self.setHorizontalScrollMode(self.ScrollPerPixel)
+        self.setVerticalScrollMode(self.ScrollPerPixel)
+
+        self.setStyleSheet("QTreeWidget {background: " +
+                           self.palette().midlight().color().name() + " ;}")
+
+        self.viewport().installEventFilter(WheelFilter(self.viewport(), True))
+
+        self._groups = dict()
+        self._arg_to_widgets = dict()
+
         arguments = self.manager.get_submission_arguments(self.expurl)

         if not arguments:
-            self.insertTopLevelItem(0, QtWidgets.QTreeWidgetItem(["No arguments"]))
+            self.addTopLevelItem(QtWidgets.QTreeWidgetItem(["No arguments"]))

+        gradient = QtGui.QLinearGradient(
+            0, 0, 0, QtGui.QFontMetrics(self.font()).lineSpacing()*2.5)
+        gradient.setColorAt(0, self.palette().base().color())
+        gradient.setColorAt(1, self.palette().midlight().color())
         for name, argument in arguments.items():
-            self.set_argument(name, argument)
+            widgets = dict()
+            self._arg_to_widgets[name] = widgets

-        self.quickStyleClicked.connect(dock.submit_clicked)
+            entry = procdesc_to_entry(argument["desc"])(argument)
+            widget_item = QtWidgets.QTreeWidgetItem([name])
+            if argument["tooltip"]:
+                widget_item.setToolTip(0, argument["tooltip"])
+            widgets["entry"] = entry
+            widgets["widget_item"] = widget_item
+
+            for col in range(3):
+                widget_item.setBackground(col, gradient)
+            font = widget_item.font(0)
+            font.setBold(True)
+            widget_item.setFont(0, font)
+
+            if argument["group"] is None:
+                self.addTopLevelItem(widget_item)
+            else:
+                self._get_group(argument["group"]).addChild(widget_item)
+            fix_layout = LayoutWidget()
+            widgets["fix_layout"] = fix_layout
+            fix_layout.addWidget(entry)
+            self.setItemWidget(widget_item, 1, fix_layout)
+            recompute_argument = QtWidgets.QToolButton()
+            recompute_argument.setToolTip("Re-run the experiment's build "
+                                          "method and take the default value")
+            recompute_argument.setIcon(
+                QtWidgets.QApplication.style().standardIcon(
+                    QtWidgets.QStyle.SP_BrowserReload))
+            recompute_argument.clicked.connect(
+                partial(self._recompute_argument_clicked, name))
+
+            tool_buttons = LayoutWidget()
+            tool_buttons.addWidget(recompute_argument, 1)
+
+            disable_other_scans = QtWidgets.QToolButton()
+            widgets["disable_other_scans"] = disable_other_scans
+            disable_other_scans.setIcon(
+                QtWidgets.QApplication.style().standardIcon(
+                    QtWidgets.QStyle.SP_DialogResetButton))
+            disable_other_scans.setToolTip("Disable all other scans in "
+                                           "this experiment")
+            disable_other_scans.clicked.connect(
+                partial(self._disable_other_scans, name))
+            tool_buttons.layout.setRowStretch(0, 1)
+            tool_buttons.layout.setRowStretch(3, 1)
+            tool_buttons.addWidget(disable_other_scans, 2)
+            if not isinstance(entry, ScanEntry):
+                disable_other_scans.setVisible(False)
+
+            self.setItemWidget(widget_item, 2, tool_buttons)
+
+        widget_item = QtWidgets.QTreeWidgetItem()
+        self.addTopLevelItem(widget_item)
         recompute_arguments = QtWidgets.QPushButton("Recompute all arguments")
         recompute_arguments.setIcon(
             QtWidgets.QApplication.style().standardIcon(

@@ -59,10 +136,41 @@ class _ArgumentEditor(EntryTreeWidget):
         buttons.layout.setColumnStretch(1, 0)
         buttons.layout.setColumnStretch(2, 0)
         buttons.layout.setColumnStretch(3, 1)
-        self.setItemWidget(self.bottom_item, 1, buttons)
+        self.setItemWidget(widget_item, 1, buttons)

-    def reset_entry(self, key):
-        asyncio.ensure_future(self._recompute_argument(key))
+    def _get_group(self, name):
+        if name in self._groups:
+            return self._groups[name]
+        group = QtWidgets.QTreeWidgetItem([name])
+        for col in range(3):
+            group.setBackground(col, self.palette().mid())
+            group.setForeground(col, self.palette().brightText())
+            font = group.font(col)
+            font.setBold(True)
+            group.setFont(col, font)
+        self.addTopLevelItem(group)
+        self._groups[name] = group
+        return group
+
+    def update_argument(self, name, argument):
+        widgets = self._arg_to_widgets[name]
+
+        # Qt needs a setItemWidget() to handle layout correctly,
+        # simply replacing the entry inside the LayoutWidget
+        # results in a bug.
+
+        widgets["entry"].deleteLater()
+        widgets["entry"] = procdesc_to_entry(argument["desc"])(argument)
+        widgets["disable_other_scans"].setVisible(
+            isinstance(widgets["entry"], ScanEntry))
+        widgets["fix_layout"].deleteLater()
+        widgets["fix_layout"] = LayoutWidget()
+        widgets["fix_layout"].addWidget(widgets["entry"])
+        self.setItemWidget(widgets["widget_item"], 1, widgets["fix_layout"])
+        self.updateGeometries()
+
+    def _recompute_argument_clicked(self, name):
+        asyncio.ensure_future(self._recompute_argument(name))

     async def _recompute_argument(self, name):
         try:

@@ -79,6 +187,30 @@ class _ArgumentEditor(EntryTreeWidget):
         argument["state"] = state
         self.update_argument(name, argument)

+    def _disable_other_scans(self, current_name):
+        for name, widgets in self._arg_to_widgets.items():
+            if (name != current_name
+                    and isinstance(widgets["entry"], ScanEntry)):
+                widgets["entry"].disable()
+
+    def save_state(self):
+        expanded = []
+        for k, v in self._groups.items():
+            if v.isExpanded():
+                expanded.append(k)
+        return {
+            "expanded": expanded,
+            "scroll": self.verticalScrollBar().value()
+        }
+
+    def restore_state(self, state):
+        for e in state["expanded"]:
+            try:
+                self._groups[e].setExpanded(True)
+            except KeyError:
+                pass
+        self.verticalScrollBar().setValue(state["scroll"])
+
 # Hooks that allow user-supplied argument editors to react to imminent user
 # actions. Here, we always keep the manager-stored submission arguments
 # up-to-date, so no further action is required.
@@ -98,7 +230,7 @@ class _ExperimentDock(QtWidgets.QMdiSubWindow):
     def __init__(self, manager, expurl):
         QtWidgets.QMdiSubWindow.__init__(self)
         qfm = QtGui.QFontMetrics(self.font())
-        self.resize(100 * qfm.averageCharWidth(), 30 * qfm.lineSpacing())
+        self.resize(100*qfm.averageCharWidth(), 30*qfm.lineSpacing())
         self.setWindowTitle(expurl)
         self.setWindowIcon(QtWidgets.QApplication.style().standardIcon(
             QtWidgets.QStyle.SP_FileDialogContentsView))

@@ -131,17 +263,17 @@ class _ExperimentDock(QtWidgets.QMdiSubWindow):
             datetime.setDate(QtCore.QDate.currentDate())
         else:
             datetime.setDateTime(QtCore.QDateTime.fromMSecsSinceEpoch(
-                int(scheduling["due_date"] * 1000)))
+                int(scheduling["due_date"]*1000)))
         datetime_en.setChecked(scheduling["due_date"] is not None)

         def update_datetime(dt):
-            scheduling["due_date"] = dt.toMSecsSinceEpoch() / 1000
+            scheduling["due_date"] = dt.toMSecsSinceEpoch()/1000
             datetime_en.setChecked(True)
         datetime.dateTimeChanged.connect(update_datetime)

         def update_datetime_en(checked):
             if checked:
-                due_date = datetime.dateTime().toMSecsSinceEpoch() / 1000
+                due_date = datetime.dateTime().toMSecsSinceEpoch()/1000
             else:
                 due_date = None
             scheduling["due_date"] = due_date

@@ -217,10 +349,9 @@ class _ExperimentDock(QtWidgets.QMdiSubWindow):
             repo_rev = QtWidgets.QLineEdit()
             repo_rev.setPlaceholderText("current")
             repo_rev.setClearButtonEnabled(True)
-            repo_rev_label = QtWidgets.QLabel("Rev / ref:")
+            repo_rev_label = QtWidgets.QLabel("Revision:")
             repo_rev_label.setToolTip("Experiment repository revision "
-                                      "(commit ID) or reference (branch "
-                                      "or tag) to use")
+                                      "(commit ID) to use")
             self.layout.addWidget(repo_rev_label, 3, 2)
             self.layout.addWidget(repo_rev, 3, 3)

@@ -237,7 +368,7 @@ class _ExperimentDock(QtWidgets.QMdiSubWindow):

         submit = QtWidgets.QPushButton("Submit")
         submit.setIcon(QtWidgets.QApplication.style().standardIcon(
             QtWidgets.QStyle.SP_DialogOkButton))
         submit.setToolTip("Schedule the experiment (Ctrl+Return)")
         submit.setShortcut("CTRL+RETURN")
         submit.setSizePolicy(QtWidgets.QSizePolicy.Expanding,

@@ -247,7 +378,7 @@ class _ExperimentDock(QtWidgets.QMdiSubWindow):

         reqterm = QtWidgets.QPushButton("Terminate instances")
         reqterm.setIcon(QtWidgets.QApplication.style().standardIcon(
             QtWidgets.QStyle.SP_DialogCancelButton))
         reqterm.setToolTip("Request termination of instances (Ctrl+Backspace)")
         reqterm.setShortcut("CTRL+BACKSPACE")
         reqterm.setSizePolicy(QtWidgets.QSizePolicy.Expanding,
@@ -289,7 +420,8 @@ class _ExperimentDock(QtWidgets.QMdiSubWindow):
         arginfo = expdesc["arginfo"]
         for k, v in overrides.items():
             # Some values (e.g. scans) may have multiple defaults in a list
-            if ("default" in arginfo[k][0] and isinstance(arginfo[k][0]["default"], list)):
+            if ("default" in arginfo[k][0]
+                    and isinstance(arginfo[k][0]["default"], list)):
                 arginfo[k][0]["default"].insert(0, v)
             else:
                 arginfo[k][0]["default"] = v

@@ -300,8 +432,8 @@ class _ExperimentDock(QtWidgets.QMdiSubWindow):

         editor_class = self.manager.get_argument_editor_class(self.expurl)
         self.argeditor = editor_class(self.manager, self, self.expurl)
-        self.layout.addWidget(self.argeditor, 0, 0, 1, 5)
         self.argeditor.restore_state(argeditor_state)
+        self.layout.addWidget(self.argeditor, 0, 0, 1, 5)

     def contextMenuEvent(self, event):
         menu = QtWidgets.QMenu(self)

@@ -354,9 +486,9 @@ class _ExperimentDock(QtWidgets.QMdiSubWindow):
                     unparse_devarg_override(expid["devarg_override"]))
             self.log_level.setCurrentIndex(log_levels.index(
                 log_level_to_name(expid["log_level"])))
-            if "repo_rev" in expid and \
-               expid["repo_rev"] != "N/A" and \
-               hasattr(self, "repo_rev"):
+            if ("repo_rev" in expid and
+                    expid["repo_rev"] != "N/A" and
+                    hasattr(self, "repo_rev")):
                 self.repo_rev.setText(expid["repo_rev"])
         except:
             logger.error("Could not set submission options from HDF5 expid",
@@ -546,7 +678,7 @@ class ExperimentManager:
         self.submission_arguments[expurl] = arguments
         self.argument_ui_names[expurl] = ui_name
         return arguments

     def set_argument_value(self, expurl, name, value):
         try:
             argument = self.submission_arguments[expurl][name]

@@ -559,8 +691,7 @@ class ExperimentManager:
             if expurl in self.open_experiments.keys():
                 self.open_experiments[expurl].argeditor.update_argument(name, argument)
         except:
-            logger.warn("Failed to set value for argument \"{}\" in experiment: {}."
-                        .format(name, expurl), exc_info=1)
+            logger.warn("Failed to set value for argument \"{}\" in experiment: {}.".format(name, expurl), exc_info=1)

     def get_submission_arguments(self, expurl):
         if expurl in self.submission_arguments:

@@ -570,8 +701,8 @@ class ExperimentManager:
             raise ValueError("Submission arguments must be preinitialized "
                              "when not using repository")
         class_desc = self.explist[expurl[5:]]
-        return self.initialize_submission_arguments(expurl, class_desc["arginfo"],
-                                                    class_desc.get("argument_ui", None))
+        return self.initialize_submission_arguments(expurl,
+            class_desc["arginfo"], class_desc.get("argument_ui", None))

     def open_experiment(self, expurl):
         if expurl in self.open_experiments:

@@ -608,13 +739,8 @@ class ExperimentManager:
         del self.open_experiments[expurl]

     async def _submit_task(self, expurl, *args):
-        try:
-            rid = await self.schedule_ctl.submit(*args)
-        except KeyError:
-            expid = args[1]
-            logger.error("Submission failed - revision \"%s\" was not found", expid["repo_rev"])
-        else:
-            logger.info("Submitted '%s', RID is %d", expurl, rid)
+        rid = await self.schedule_ctl.submit(*args)
+        logger.info("Submitted '%s', RID is %d", expurl, rid)

     def submit(self, expurl):
         file, class_name, _ = self.resolve_expurl(expurl)

@@ -671,9 +797,9 @@ class ExperimentManager:
                 repo_match = "repo_rev" in expid
             else:
                 repo_match = "repo_rev" not in expid
-            if repo_match and \
-               ("file" in expid and expid["file"] == file) and \
-               expid["class_name"] == class_name:
+            if (repo_match and
+                    ("file" in expid and expid["file"] == file) and
+                    expid["class_name"] == class_name):
                 rids.append(rid)
         asyncio.ensure_future(self._request_term_multiple(rids))

@@ -693,7 +819,7 @@ class ExperimentManager:
         for class_name, class_desc in description.items():
             expurl = "file:{}@{}".format(class_name, file)
             self.initialize_submission_arguments(expurl, class_desc["arginfo"],
                                                  class_desc.get("argument_ui", None))
             if expurl in self.open_experiments:
                 self.open_experiments[expurl].close()
             self.open_experiment(expurl)

@@ -727,7 +853,6 @@ class ExperimentManager:

         self.is_quick_open_shown = True
         dialog = _QuickOpenDialog(self)

         def closed():
             self.is_quick_open_shown = False
         dialog.closed.connect(closed)
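For context on the _submit_task difference above: on the left-hand branch, a submission whose "repo_rev" names a branch or tag the master cannot resolve surfaces as a KeyError and is reported as a failed submission rather than crashing the task. A hedged sketch of the expid dictionary such a dashboard submission carries (field names as they appear elsewhere in this diff; the values and the submit() call shape are illustrative assumptions):

    expid = {
        "log_level": 20,                  # logging.INFO
        "repo_rev": "my-feature-branch",  # branch/tag name instead of a commit hash
        "file": "repository/demo.py",
        "class_name": "ArgumentsDemo",
        "arguments": {},
    }
    # roughly: await schedule_ctl.submit(pipeline_name, expid, priority, due_date, flush)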
@@ -94,7 +94,7 @@ class _OpenFileDialog(QtWidgets.QDialog):
                 else:
                     break
             self.explorer.current_directory = \
-                self.explorer.current_directory[:idx + 1]
+                self.explorer.current_directory[:idx+1]
             if self.explorer.current_directory == "/":
                 self.explorer.current_directory = ""
             asyncio.ensure_future(self.refresh_view())

@@ -103,7 +103,6 @@ class _OpenFileDialog(QtWidgets.QDialog):
             asyncio.ensure_future(self.refresh_view())
         else:
             file = self.explorer.current_directory + selected

             async def open_task():
                 try:
                     await self.exp_manager.open_file(file)

@@ -233,7 +232,7 @@ class ExplorerDock(QtWidgets.QDockWidget):

         set_shortcut_menu = QtWidgets.QMenu()
         for i in range(12):
-            action = QtWidgets.QAction("F" + str(i + 1), self.el)
+            action = QtWidgets.QAction("F" + str(i+1), self.el)
             action.triggered.connect(partial(self.set_shortcut, i))
             set_shortcut_menu.addAction(action)

@@ -247,14 +246,12 @@ class ExplorerDock(QtWidgets.QDockWidget):

         scan_repository_action = QtWidgets.QAction("Scan repository HEAD",
                                                    self.el)

         def scan_repository():
             asyncio.ensure_future(experiment_db_ctl.scan_repository_async())
         scan_repository_action.triggered.connect(scan_repository)
         self.el.addAction(scan_repository_action)

         scan_ddb_action = QtWidgets.QAction("Scan device database", self.el)

         def scan_ddb():
             asyncio.ensure_future(device_db_ctl.scan())
         scan_ddb_action.triggered.connect(scan_ddb)

@@ -295,7 +292,7 @@ class ExplorerDock(QtWidgets.QDockWidget):
         if expname is not None:
             expurl = "repo:" + expname
             self.d_shortcuts.set_shortcut(nr, expurl)
-            logger.info("Set shortcut F%d to '%s'", nr + 1, expurl)
+            logger.info("Set shortcut F%d to '%s'", nr+1, expurl)

     def update_scanning(self, scanning):
         if scanning:
@ -1,155 +0,0 @@
|
||||||
import logging
|
|
||||||
import asyncio
|
|
||||||
|
|
||||||
from PyQt5 import QtCore, QtWidgets, QtGui
|
|
||||||
|
|
||||||
from artiq.gui.models import DictSyncModel
|
|
||||||
from artiq.gui.entries import EntryTreeWidget, procdesc_to_entry
|
|
||||||
from artiq.gui.tools import LayoutWidget
|
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class Model(DictSyncModel):
|
|
||||||
def __init__(self, init):
|
|
||||||
DictSyncModel.__init__(self, ["RID", "Title", "Args"], init)
|
|
||||||
|
|
||||||
def convert(self, k, v, column):
|
|
||||||
if column == 0:
|
|
||||||
return k
|
|
||||||
elif column == 1:
|
|
||||||
txt = ": " + v["title"] if v["title"] != "" else ""
|
|
||||||
return str(k) + txt
|
|
||||||
elif column == 2:
|
|
||||||
return v["arglist_desc"]
|
|
||||||
else:
|
|
||||||
raise ValueError
|
|
||||||
|
|
||||||
def sort_key(self, k, v):
|
|
||||||
return k
|
|
||||||
|
|
||||||
|
|
||||||
class _InteractiveArgsRequest(EntryTreeWidget):
|
|
||||||
supplied = QtCore.pyqtSignal(int, dict)
|
|
||||||
cancelled = QtCore.pyqtSignal(int)
|
|
||||||
|
|
||||||
def __init__(self, rid, arglist_desc):
|
|
||||||
EntryTreeWidget.__init__(self)
|
|
||||||
self.rid = rid
|
|
||||||
self.arguments = dict()
|
|
||||||
for key, procdesc, group, tooltip in arglist_desc:
|
|
||||||
        self.arguments[key] = {"desc": procdesc, "group": group, "tooltip": tooltip}
        self.set_argument(key, self.arguments[key])

        self.quickStyleClicked.connect(self.supply)

        cancel_btn = QtWidgets.QPushButton("Cancel")
        cancel_btn.setIcon(QtWidgets.QApplication.style().standardIcon(
            QtWidgets.QStyle.SP_DialogCancelButton))
        cancel_btn.clicked.connect(self.cancel)
        supply_btn = QtWidgets.QPushButton("Supply")
        supply_btn.setIcon(QtWidgets.QApplication.style().standardIcon(
            QtWidgets.QStyle.SP_DialogOkButton))
        supply_btn.clicked.connect(self.supply)

        buttons = LayoutWidget()
        buttons.addWidget(cancel_btn, 1, 1)
        buttons.addWidget(supply_btn, 1, 2)
        buttons.layout.setColumnStretch(0, 1)
        buttons.layout.setColumnStretch(1, 0)
        buttons.layout.setColumnStretch(2, 0)
        buttons.layout.setColumnStretch(3, 1)
        self.setItemWidget(self.bottom_item, 1, buttons)

    def supply(self):
        argument_values = dict()
        for key, argument in self.arguments.items():
            entry_cls = procdesc_to_entry(argument["desc"])
            argument_values[key] = entry_cls.state_to_value(argument["state"])
        self.supplied.emit(self.rid, argument_values)

    def cancel(self):
        self.cancelled.emit(self.rid)


class _InteractiveArgsView(QtWidgets.QStackedWidget):
    supplied = QtCore.pyqtSignal(int, dict)
    cancelled = QtCore.pyqtSignal(int)

    def __init__(self):
        QtWidgets.QStackedWidget.__init__(self)
        self.tabs = QtWidgets.QTabWidget()
        self.default_label = QtWidgets.QLabel("No pending interactive arguments requests.")
        self.default_label.setAlignment(QtCore.Qt.AlignCenter)
        font = QtGui.QFont(self.default_label.font())
        font.setItalic(True)
        self.default_label.setFont(font)
        self.addWidget(self.tabs)
        self.addWidget(self.default_label)
        self.model = Model({})

    def setModel(self, model):
        self.setCurrentIndex(1)
        for i in range(self.tabs.count()):
            widget = self.tabs.widget(i)
            self.tabs.removeTab(i)
            widget.deleteLater()
        self.model = model
        self.model.rowsInserted.connect(self.rowsInserted)
        self.model.rowsRemoved.connect(self.rowsRemoved)
        for i in range(self.model.rowCount(QtCore.QModelIndex())):
            self._insert_widget(i)

    def _insert_widget(self, row):
        rid = self.model.data(self.model.index(row, 0), QtCore.Qt.DisplayRole)
        title = self.model.data(self.model.index(row, 1), QtCore.Qt.DisplayRole)
        arglist_desc = self.model.data(self.model.index(row, 2), QtCore.Qt.DisplayRole)
        inter_args_request = _InteractiveArgsRequest(rid, arglist_desc)
        inter_args_request.supplied.connect(self.supplied)
        inter_args_request.cancelled.connect(self.cancelled)
        self.tabs.insertTab(row, inter_args_request, title)

    def rowsInserted(self, parent, first, last):
        assert first == last
        self.setCurrentIndex(0)
        self._insert_widget(first)

    def rowsRemoved(self, parent, first, last):
        assert first == last
        widget = self.tabs.widget(first)
        self.tabs.removeTab(first)
        widget.deleteLater()
        if self.tabs.count() == 0:
            self.setCurrentIndex(1)


class InteractiveArgsDock(QtWidgets.QDockWidget):
    def __init__(self, interactive_args_sub, interactive_args_rpc):
        QtWidgets.QDockWidget.__init__(self, "Interactive Args")
        self.setObjectName("Interactive Args")
        self.setFeatures(
            QtWidgets.QDockWidget.DockWidgetMovable | QtWidgets.QDockWidget.DockWidgetFloatable)
        self.interactive_args_rpc = interactive_args_rpc
        self.request_view = _InteractiveArgsView()
        self.request_view.supplied.connect(self.supply)
        self.request_view.cancelled.connect(self.cancel)
        self.setWidget(self.request_view)
        interactive_args_sub.add_setmodel_callback(self.request_view.setModel)

    def supply(self, rid, values):
        asyncio.ensure_future(self._supply_task(rid, values))

    async def _supply_task(self, rid, values):
        try:
            await self.interactive_args_rpc.supply(rid, values)
        except Exception:
            logger.error("failed to supply interactive arguments for experiment: %d",
                         rid, exc_info=True)

    def cancel(self, rid):
        asyncio.ensure_future(self._cancel_task(rid))

    async def _cancel_task(self, rid):
        try:
            await self.interactive_args_rpc.cancel(rid)
        except Exception:
            logger.error("failed to cancel interactive args request for experiment: %d",
                         rid, exc_info=True)
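Editor's note: the supply path above turns each entry widget's saved state into a plain value and forwards the whole dict to the master over the interactive_args RPC. A minimal standalone sketch of that conversion, assuming procdesc_to_entry() (imported from the ARTIQ GUI entry helpers in the surrounding file) and an already-connected interactive_args_rpc object are available:

    def collect_values(arguments):
        # arguments: key -> {"desc": procdesc, "state": current entry state}
        return {key: procdesc_to_entry(arg["desc"]).state_to_value(arg["state"])
                for key, arg in arguments.items()}

    async def supply(interactive_args_rpc, rid, arguments):
        # hand the collected values back to the experiment identified by rid
        await interactive_args_rpc.supply(rid, collect_values(arguments))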
@@ -3,17 +3,22 @@ import logging
 import textwrap
 from collections import namedtuple
 
-from PyQt5 import QtCore, QtWidgets
+from PyQt5 import QtCore, QtWidgets, QtGui
 
-from artiq.coredevice.comm_moninj import CommMonInj, TTLOverride, TTLProbe
-from artiq.coredevice.ad9912_reg import AD9912_SER_CONF
+from sipyco.sync_struct import Subscriber
+
+from artiq.coredevice.comm_moninj import *
+from artiq.coredevice.ad9910 import (
+    _AD9910_REG_PROFILE0, _AD9910_REG_PROFILE7,
+    _AD9910_REG_FTW, _AD9910_REG_CFR1
+)
+from artiq.coredevice.ad9912_reg import AD9912_POW1, AD9912_SER_CONF
 from artiq.gui.tools import LayoutWidget
 from artiq.gui.flowlayout import FlowLayout
 
 
 logger = logging.getLogger(__name__)
 
 
 class _CancellableLineEdit(QtWidgets.QLineEdit):
     def escapePressedConnect(self, cb):
         self.esc_cb = cb
@@ -26,11 +31,13 @@ class _CancellableLineEdit(QtWidgets.QLineEdit):
 
 
 class _TTLWidget(QtWidgets.QFrame):
-    override_toggled = QtCore.pyqtSignal(bool)
-    level_toggled = QtCore.pyqtSignal(bool)
-
-    def __init__(self, title):
+    def __init__(self, dm, channel, force_out, title):
         QtWidgets.QFrame.__init__(self)
 
+        self.channel = channel
+        self.set_mode = dm.ttl_set_mode
+        self.force_out = force_out
+
         self.setFrameShape(QtWidgets.QFrame.Box)
         self.setFrameShadow(QtWidgets.QFrame.Raised)
 
@@ -77,28 +84,15 @@ class _TTLWidget(QtWidgets.QFrame):
         grid.setRowStretch(3, 0)
         grid.setRowStretch(4, 1)
 
+        self.programmatic_change = False
         self.override.clicked.connect(self.override_toggled)
         self.level.clicked.connect(self.level_toggled)
 
-    def setValueText(self, override, level):
-        value_s = "1" if level else "0"
-        if override:
-            value_s = "<b>" + value_s + "</b>"
-            color = " color=\"red\""
-        else:
-            color = ""
-        self.value.setText("<font size=\"5\"{}>{}</font>".format(
-            color, value_s))
-
-    def setDirectionText(self, oe):
-        direction = "OUT" if oe else "IN"
-        self.direction.setText("<font size=\"2\">" + direction + "</font>")
-
-    def setButtonsState(self, override, level):
-        self.override.setChecked(override)
-        if override:
-            self.stack.setCurrentIndex(1)
-        self.level.setChecked(level)
+        self.cur_level = False
+        self.cur_oe = False
+        self.cur_override = False
+        self.cur_override_level = False
+        self.refresh_display()
 
     def enterEvent(self, event):
         self.stack.setCurrentIndex(1)
@@ -109,24 +103,11 @@ class _TTLWidget(QtWidgets.QFrame):
         self.stack.setCurrentIndex(0)
         QtWidgets.QFrame.leaveEvent(self, event)
 
-
-class _TTLHandler:
-    def __init__(self, dm, channel, force_out, title):
-        self.channel = channel
-        self.force_out = force_out
-        self.set_mode = dm.ttl_set_mode
-        self.cur_level = False
-        self.cur_oe = False
-        self.cur_override = False
-        self.cur_override_level = False
-        self.widget = _TTLWidget(title)
-        self.widget.override_toggled.connect(self.override_toggled)
-        self.widget.level_toggled.connect(self.level_toggled)
-        self.refresh_display()
-
     def override_toggled(self, override):
+        if self.programmatic_change:
+            return
         if override:
-            if self.widget.level.isChecked():
+            if self.level.isChecked():
                 self.set_mode(self.channel, "1")
             else:
                 self.set_mode(self.channel, "0")
@@ -134,7 +115,9 @@ class _TTLHandler:
             self.set_mode(self.channel, "exp")
 
     def level_toggled(self, level):
-        if self.widget.override.isChecked():
+        if self.programmatic_change:
+            return
+        if self.override.isChecked():
             if level:
                 self.set_mode(self.channel, "1")
             else:
@@ -142,24 +125,106 @@
     def refresh_display(self):
         level = self.cur_override_level if self.cur_override else self.cur_level
+        value_s = "1" if level else "0"
+
+        if self.cur_override:
+            value_s = "<b>" + value_s + "</b>"
+            color = " color=\"red\""
+        else:
+            color = ""
+        self.value.setText("<font size=\"5\"{}>{}</font>".format(
+            color, value_s))
         oe = self.cur_oe or self.force_out
-        self.widget.setValueText(self.cur_override, level)
-        self.widget.setDirectionText(oe)
-        with QtCore.QSignalBlocker(self.widget):
-            self.widget.setButtonsState(self.cur_override, self.cur_level)
+        direction = "OUT" if oe else "IN"
+        self.direction.setText("<font size=\"2\">" + direction + "</font>")
+
+        self.programmatic_change = True
+        try:
+            self.override.setChecked(self.cur_override)
+            if self.cur_override:
+                self.stack.setCurrentIndex(1)
+            self.level.setChecked(self.cur_level)
+        finally:
+            self.programmatic_change = False
 
     def sort_key(self):
         return self.channel
 
 
-class _DDSWidget(QtWidgets.QFrame):
-    apply_changes = QtCore.pyqtSignal()
-    off_clicked = QtCore.pyqtSignal()
-
-    def __init__(self, title, is_urukul):
+class _SimpleDisplayWidget(QtWidgets.QFrame):
+    def __init__(self, title):
         QtWidgets.QFrame.__init__(self)
 
-        self._value = 0
+        self.setFrameShape(QtWidgets.QFrame.Box)
+        self.setFrameShadow(QtWidgets.QFrame.Raised)
+
+        grid = QtWidgets.QGridLayout()
+        grid.setContentsMargins(0, 0, 0, 0)
+        grid.setHorizontalSpacing(0)
+        grid.setVerticalSpacing(0)
+        self.setLayout(grid)
+        label = QtWidgets.QLabel(title)
+        label.setAlignment(QtCore.Qt.AlignCenter)
+        grid.addWidget(label, 1, 1)
+
+        self.value = QtWidgets.QLabel()
+        self.value.setAlignment(QtCore.Qt.AlignCenter)
+        grid.addWidget(self.value, 2, 1, 6, 1)
+
+        grid.setRowStretch(1, 1)
+        grid.setRowStretch(2, 0)
+        grid.setRowStretch(3, 1)
+
+        self.refresh_display()
+
+    def refresh_display(self):
+        raise NotImplementedError
+
+    def sort_key(self):
+        raise NotImplementedError
+
+
+class _DDSModel:
+    def __init__(self, dds_type, ref_clk, cpld=None, pll=1, clk_div=0):
+        self.cpld = cpld
+        self.cur_frequency = 0
+        self.cur_reg = 0
+        self.dds_type = dds_type
+        self.is_urukul = dds_type in ["AD9910", "AD9912"]
+
+        if dds_type == "AD9914":
+            self.ftw_per_hz = 2**32 / ref_clk
+        else:
+            if dds_type == "AD9910":
+                max_freq = 1 << 32
+                clk_mult = [4, 1, 2, 4]
+            elif dds_type == "AD9912":  # AD9912
+                max_freq = 1 << 48
+                clk_mult = [1, 1, 2, 4]
+            else:
+                raise NotImplementedError
+            sysclk = ref_clk / clk_mult[clk_div] * pll
+            self.ftw_per_hz = 1 / sysclk * max_freq
+
+    def monitor_update(self, probe, value):
+        if self.dds_type == "AD9912":
+            value = value << 16
+        self.cur_frequency = self._ftw_to_freq(value)
+
+    def _ftw_to_freq(self, ftw):
+        return ftw / self.ftw_per_hz
+
+
+class _DDSWidget(QtWidgets.QFrame):
+    def __init__(self, dm, title, bus_channel=0, channel=0, dds_model=None):
+        self.dm = dm
+        self.bus_channel = bus_channel
+        self.channel = channel
+        self.dds_name = title
+        self.cur_frequency = 0
+        self.dds_model = dds_model
+
+        QtWidgets.QFrame.__init__(self)
 
         self.setFrameShape(QtWidgets.QFrame.Box)
         self.setFrameShadow(QtWidgets.QFrame.Raised)
 
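Editor's note: _DDSModel above reduces DDS monitoring to one number, ftw_per_hz, so that a monitored frequency tuning word (FTW) maps back to Hz by division. A small standalone sketch of that arithmetic, using the register widths and clock-multiplier tables shown in the diff (the clocking values in the usage line are purely hypothetical):

    def ftw_per_hz(dds_type, ref_clk, pll=1, clk_div=0):
        if dds_type == "AD9914":
            return 2**32 / ref_clk
        if dds_type == "AD9910":
            max_freq, clk_mult = 1 << 32, [4, 1, 2, 4]
        elif dds_type == "AD9912":
            max_freq, clk_mult = 1 << 48, [1, 1, 2, 4]
        else:
            raise NotImplementedError
        sysclk = ref_clk / clk_mult[clk_div] * pll
        return max_freq / sysclk

    fph = ftw_per_hz("AD9910", ref_clk=1e9, pll=1, clk_div=1)  # hypothetical clocking
    ftw = int(100e6 * fph)      # frequency -> tuning word
    print(ftw / fph)            # tuning word -> approximately 100 MHz again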
@@ -218,9 +283,9 @@ class _DDSWidget(QtWidgets.QFrame):
         set_btn.setText("Set")
         set_btn.setToolTip("Set frequency")
         set_grid.addWidget(set_btn, 0, 1, 1, 1)
 
         # for urukuls also allow switching off RF
-        if is_urukul:
+        if self.dds_model.is_urukul:
             off_btn = QtWidgets.QToolButton()
             off_btn.setText("Off")
             off_btn.setToolTip("Switch off the output")
@@ -246,149 +311,67 @@ class _DDSWidget(QtWidgets.QFrame):
         grid.setRowStretch(3, 1)
 
         set_btn.clicked.connect(self.set_clicked)
-        apply.clicked.connect(self._apply_changes)
-        if is_urukul:
+        apply.clicked.connect(self.apply_changes)
+        if self.dds_model.is_urukul:
             off_btn.clicked.connect(self.off_clicked)
             off_btn.setToolTip(textwrap.dedent(
                 """Note: If TTL RTIO sw for the channel is switched high,
                 this button will not disable the channel.
                 Use the TTL override instead."""))
-        self.value_edit.returnPressed.connect(self._apply_changes)
+        self.value_edit.returnPressed.connect(lambda: self.apply_changes(None))
+        self.value_edit.escapePressedConnect(self.cancel_changes)
+        cancel.clicked.connect(self.cancel_changes)
 
-        def cancel_changes(cancel):
-            self.set_page(0)
-
-        self.value_edit.escapePressedConnect(cancel_changes)
-        cancel.clicked.connect(cancel_changes)
-
-    def _apply_changes(self):
-        self.widget.set_page(0)
-        self.apply_changes.emit()
-
-    def set_clicked(self):
-        self.set_page(1)
-        self.set_edit_value(self._value)
-        self.start_edit()
-
-    def set_page(self, page):
-        self.data_stack.setCurrentIndex(page)
-        self.button_stack.setCurrentIndex(page)
-
-    def get_value(self):
-        return float(self.value_edit.text())
-
-    def set_edit_value(self, value):
-        self.value_edit.setText("{:.7f}".format(value))
-
-    def set_value(self, value):
-        self.value_label.setText("<font size=\"4\">{:.7f}</font>".format(value))
-        self._value = value
-        self.set_edit_value(value)
-
-    def start_edit(self):
+        self.refresh_display()
+
+    def set_clicked(self, set):
+        self.data_stack.setCurrentIndex(1)
+        self.button_stack.setCurrentIndex(1)
+        self.value_edit.setText("{:.7f}"
+                                .format(self.cur_frequency/1e6))
         self.value_edit.setFocus()
         self.value_edit.selectAll()
 
+    def off_clicked(self, set):
+        self.dm.dds_channel_toggle(self.dds_name, self.dds_model, sw=False)
+
+    def apply_changes(self, apply):
+        self.data_stack.setCurrentIndex(0)
+        self.button_stack.setCurrentIndex(0)
+        frequency = float(self.value_edit.text())*1e6
+        self.dm.dds_set_frequency(self.dds_name, self.dds_model, frequency)
+
+    def cancel_changes(self, cancel):
+        self.data_stack.setCurrentIndex(0)
+        self.button_stack.setCurrentIndex(0)
 
-class _DDSHandler:
-    def __init__(self, dm, title, bus_channel, channel, dds_type,
-                 ref_clk, cpld=None, pll=1, clk_div=0):
-        self.dm = dm
-        self.bus_channel = bus_channel
-        self.channel = channel
-        self.cur_frequency = 0
-        self.dds_name = title
-        self.cpld = cpld
-        self.cur_frequency = 0
-        self.cur_reg = 0
-        self.dds_type = dds_type
-        self.is_urukul = dds_type in ["AD9910", "AD9912"]
-
-        if dds_type == "AD9914":
-            self.ftw_per_hz = 2**32 / ref_clk
-        else:
-            if dds_type == "AD9910":
-                max_freq = 1 << 32
-                clk_mult = [4, 1, 2, 4]
-            elif dds_type == "AD9912":  # AD9912
-                max_freq = 1 << 48
-                clk_mult = [1, 1, 2, 4]
-            else:
-                raise NotImplementedError
-            sysclk = ref_clk / clk_mult[clk_div] * pll
-            self.ftw_per_hz = 1 / sysclk * max_freq
-        self.widget = _DDSWidget(title, self.is_urukul)
-        self.widget.apply_changes.connect(self.apply_changes)
-        self.widget.off_clicked.connect(self.off_clicked)
-        self.refresh_display()
-
-    def monitor_update(self, probe, value):
-        if self.dds_type == "AD9912":
-            value = value << 16
-        self.cur_frequency = self._ftw_to_freq(value)
-
-    def _ftw_to_freq(self, ftw):
-        return ftw / self.ftw_per_hz
-
     def refresh_display(self):
-        self.widget.set_value(self.cur_frequency / 1e6)
-
-    def apply_changes(self):
-        frequency = self.widget.get_value() * 1e6
-        self.dm.dds_set_frequency(self.dds_name, frequency)
-
-    def off_clicked(self):
-        self.dm.dds_channel_toggle(self.dds_name, sw=False)
+        self.cur_frequency = self.dds_model.cur_frequency
+        self.value_label.setText("<font size=\"4\">{:.7f}</font>"
+                                 .format(self.cur_frequency/1e6))
+        self.value_edit.setText("{:.7f}"
+                                .format(self.cur_frequency/1e6))
 
     def sort_key(self):
         return (self.bus_channel, self.channel)
 
 
-class _DACWidget(QtWidgets.QFrame):
-    def __init__(self, channel, title):
-        QtWidgets.QFrame.__init__(self)
-
-        self.setFrameShape(QtWidgets.QFrame.Box)
-        self.setFrameShadow(QtWidgets.QFrame.Raised)
-
-        grid = QtWidgets.QGridLayout()
-        grid.setContentsMargins(0, 0, 0, 0)
-        grid.setHorizontalSpacing(0)
-        grid.setVerticalSpacing(0)
-        self.setLayout(grid)
-        label = QtWidgets.QLabel("{} ch{}".format(title, channel))
-        label.setAlignment(QtCore.Qt.AlignCenter)
-        grid.addWidget(label, 1, 1)
-
-        self.value = QtWidgets.QLabel()
-        self.value.setAlignment(QtCore.Qt.AlignCenter)
-        grid.addWidget(self.value, 2, 1, 6, 1)
-
-        grid.setRowStretch(1, 1)
-        grid.setRowStretch(2, 0)
-        grid.setRowStretch(3, 1)
-
-    def set_value(self, value):
-        self.value.setText("<font size=\"4\">{:.3f}</font><font size=\"2\"> %</font>"
-                           .format(value))
-
-
-class _DACHandler:
+class _DACWidget(_SimpleDisplayWidget):
     def __init__(self, dm, spi_channel, channel, title):
-        self.widget = _DACWidget(channel, title)
-        self.cur_value = 0
         self.spi_channel = spi_channel
         self.channel = channel
-        self.refresh_display()
+        self.cur_value = 0
+        _SimpleDisplayWidget.__init__(self, "{} ch{}".format(title, channel))
 
     def refresh_display(self):
-        self.widget.set_value(self.cur_value * 100 / 2**16)
+        self.value.setText("<font size=\"4\">{:.3f}</font><font size=\"2\"> %</font>"
+                           .format(self.cur_value*100/2**16))
 
     def sort_key(self):
         return (self.spi_channel, self.channel)
 
 
-_HandlerDesc = namedtuple("_HandlerDesc", "uid comment cls arguments")
+_WidgetDesc = namedtuple("_WidgetDesc", "uid comment cls arguments")
 
 
 def setup_from_ddb(ddb):
@@ -407,17 +390,20 @@ def setup_from_ddb(ddb):
                 continue
             channel = v["arguments"]["channel"]
             force_out = v["class"] == "TTLOut"
-            handler = _HandlerDesc(k, comment, _TTLHandler, (channel, force_out, k))
-            description.add(handler)
-        elif (v["module"] == "artiq.coredevice.ad9914" and v["class"] == "AD9914"):
+            widget = _WidgetDesc(k, comment, _TTLWidget, (channel, force_out, k))
+            description.add(widget)
+        elif (v["module"] == "artiq.coredevice.ad9914"
+                and v["class"] == "AD9914"):
             bus_channel = v["arguments"]["bus_channel"]
             channel = v["arguments"]["channel"]
             dds_sysclk = v["arguments"]["sysclk"]
-            handler = _HandlerDesc(k, comment, _DDSHandler,
-                                   (k, bus_channel, channel, v["class"], dds_sysclk))
-            description.add(handler)
-        elif (v["module"] == "artiq.coredevice.ad9910" and v["class"] == "AD9910") or \
-                (v["module"] == "artiq.coredevice.ad9912" and v["class"] == "AD9912"):
+            model = _DDSModel(v["class"], dds_sysclk)
+            widget = _WidgetDesc(k, comment, _DDSWidget, (k, bus_channel, channel, model))
+            description.add(widget)
+        elif (v["module"] == "artiq.coredevice.ad9910"
+                and v["class"] == "AD9910") or \
+             (v["module"] == "artiq.coredevice.ad9912"
+                and v["class"] == "AD9912"):
             channel = v["arguments"]["chip_select"] - 4
             if channel < 0:
                 continue
@@ -427,21 +413,19 @@ def setup_from_ddb(ddb):
             pll = v["arguments"]["pll_n"]
             refclk = ddb[dds_cpld]["arguments"]["refclk"]
             clk_div = v["arguments"].get("clk_div", 0)
-            handler = _HandlerDesc(k, comment, _DDSHandler,
-                                   (k, bus_channel, channel, v["class"], refclk,
-                                    dds_cpld, pll, clk_div))
-            description.add(handler)
-        elif (v["module"] == "artiq.coredevice.ad53xx" and v["class"] == "AD53xx") or \
-                (v["module"] == "artiq.coredevice.zotino" and v["class"] == "Zotino"):
+            model = _DDSModel(v["class"], refclk, dds_cpld, pll, clk_div)
+            widget = _WidgetDesc(k, comment, _DDSWidget, (k, bus_channel, channel, model))
+            description.add(widget)
+        elif ((v["module"] == "artiq.coredevice.ad53xx" and v["class"] == "AD53xx")
+                or (v["module"] == "artiq.coredevice.zotino" and v["class"] == "Zotino")):
             spi_device = v["arguments"]["spi_device"]
             spi_device = ddb[spi_device]
             while isinstance(spi_device, str):
                 spi_device = ddb[spi_device]
             spi_channel = spi_device["arguments"]["channel"]
             for channel in range(32):
-                handler = _HandlerDesc((k, channel), comment, _DACHandler,
-                                       (spi_channel, channel, k))
-                description.add(handler)
+                widget = _WidgetDesc((k, channel), comment, _DACWidget, (spi_channel, channel, k))
+                description.add(widget)
         elif v["type"] == "controller" and k == "core_moninj":
             mi_addr = v["host"]
             mi_port = v.get("port_proxy", 1383)
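Editor's note: setup_from_ddb() above walks the device database and collects widget descriptions from the "module"/"class"/"arguments" fields it finds. A hedged illustration of the kind of entries it matches; the TTL module path and channel number are assumed (they are conventional ARTIQ values, not shown in this hunk), while the "core_moninj" controller keys and the 1383 default come straight from the code above:

    device_db = {
        "ttl0": {
            "type": "local",
            "module": "artiq.coredevice.ttl",   # assumed module path
            "class": "TTLOut",                  # force_out becomes True for TTLOut
            "arguments": {"channel": 0x000004}, # hypothetical RTIO channel
        },
        "core_moninj": {
            "type": "controller",
            "host": "::1",
            "port_proxy": 1383,                 # default used by setup_from_ddb()
        },
    }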
@@ -462,20 +446,21 @@ class _DeviceManager:
 
         self.ddb = dict()
         self.description = set()
-        self.handlers_by_uid = dict()
+        self.widgets_by_uid = dict()
 
         self.dds_sysclk = 0
         self.ttl_cb = lambda: None
-        self.ttl_handlers = dict()
+        self.ttl_widgets = dict()
         self.dds_cb = lambda: None
-        self.dds_handlers = dict()
+        self.dds_widgets = dict()
         self.dac_cb = lambda: None
-        self.dac_handlers = dict()
+        self.dac_widgets = dict()
 
     def init_ddb(self, ddb):
         self.ddb = ddb
+        return ddb
 
-    def notify_ddb(self, mod):
+    def notify(self, mod):
         mi_addr, mi_port, description = setup_from_ddb(self.ddb)
 
         if (mi_addr, mi_port) != (self.mi_addr, self.mi_port):
@@ -484,45 +469,45 @@ class _DeviceManager:
             self.reconnect_mi.set()
 
         for to_remove in self.description - description:
-            handler = self.handlers_by_uid[to_remove.uid]
-            del self.handlers_by_uid[to_remove.uid]
+            widget = self.widgets_by_uid[to_remove.uid]
+            del self.widgets_by_uid[to_remove.uid]
 
-            if isinstance(handler, _TTLHandler):
-                self.setup_ttl_monitoring(False, handler.channel)
-                handler.widget.deleteLater()
-                del self.ttl_handlers[handler.channel]
+            if isinstance(widget, _TTLWidget):
+                self.setup_ttl_monitoring(False, widget.channel)
+                widget.deleteLater()
+                del self.ttl_widgets[widget.channel]
                 self.ttl_cb()
-            elif isinstance(handler, _DDSHandler):
-                self.setup_dds_monitoring(False, handler.bus_channel, handler.channel)
-                handler.widget.deleteLater()
-                del self.dds_handlers[(handler.bus_channel, handler.channel)]
+            elif isinstance(widget, _DDSWidget):
+                self.setup_dds_monitoring(False, widget.bus_channel, widget.channel)
+                widget.deleteLater()
+                del self.dds_widgets[(widget.bus_channel, widget.channel)]
                 self.dds_cb()
-            elif isinstance(handler, _DACHandler):
-                self.setup_dac_monitoring(False, handler.spi_channel, handler.channel)
-                handler.widget.deleteLater()
-                del self.dac_handlers[(handler.spi_channel, handler.channel)]
+            elif isinstance(widget, _DACWidget):
+                self.setup_dac_monitoring(False, widget.spi_channel, widget.channel)
+                widget.deleteLater()
+                del self.dac_widgets[(widget.spi_channel, widget.channel)]
                 self.dac_cb()
             else:
                 raise ValueError
 
         for to_add in description - self.description:
-            handler = to_add.cls(self, *to_add.arguments)
+            widget = to_add.cls(self, *to_add.arguments)
             if to_add.comment is not None:
-                handler.widget.setToolTip(to_add.comment)
-            self.handlers_by_uid[to_add.uid] = handler
+                widget.setToolTip(to_add.comment)
+            self.widgets_by_uid[to_add.uid] = widget
 
-            if isinstance(handler, _TTLHandler):
-                self.ttl_handlers[handler.channel] = handler
+            if isinstance(widget, _TTLWidget):
+                self.ttl_widgets[widget.channel] = widget
                 self.ttl_cb()
-                self.setup_ttl_monitoring(True, handler.channel)
-            elif isinstance(handler, _DDSHandler):
-                self.dds_handlers[(handler.bus_channel, handler.channel)] = handler
+                self.setup_ttl_monitoring(True, widget.channel)
+            elif isinstance(widget, _DDSWidget):
+                self.dds_widgets[(widget.bus_channel, widget.channel)] = widget
                 self.dds_cb()
-                self.setup_dds_monitoring(True, handler.bus_channel, handler.channel)
-            elif isinstance(handler, _DACHandler):
-                self.dac_handlers[(handler.spi_channel, handler.channel)] = handler
+                self.setup_dds_monitoring(True, widget.bus_channel, widget.channel)
+            elif isinstance(widget, _DACWidget):
+                self.dac_widgets[(widget.spi_channel, widget.channel)] = widget
                 self.dac_cb()
-                self.setup_dac_monitoring(True, handler.spi_channel, handler.channel)
+                self.setup_dac_monitoring(True, widget.spi_channel, widget.channel)
             else:
                 raise ValueError
 
@@ -530,26 +515,26 @@ class _DeviceManager:
 
     def ttl_set_mode(self, channel, mode):
        if self.mi_connection is not None:
-            handler = self.ttl_handlers[channel]
+            widget = self.ttl_widgets[channel]
            if mode == "0":
-                handler.cur_override = True
-                handler.cur_level = False
+                widget.cur_override = True
+                widget.cur_level = False
                self.mi_connection.inject(channel, TTLOverride.level.value, 0)
                self.mi_connection.inject(channel, TTLOverride.oe.value, 1)
                self.mi_connection.inject(channel, TTLOverride.en.value, 1)
            elif mode == "1":
-                handler.cur_override = True
-                handler.cur_level = True
+                widget.cur_override = True
+                widget.cur_level = True
                self.mi_connection.inject(channel, TTLOverride.level.value, 1)
                self.mi_connection.inject(channel, TTLOverride.oe.value, 1)
                self.mi_connection.inject(channel, TTLOverride.en.value, 1)
            elif mode == "exp":
-                handler.cur_override = False
+                widget.cur_override = False
                self.mi_connection.inject(channel, TTLOverride.en.value, 0)
            else:
                raise ValueError
            # override state may have changed
-            handler.refresh_display()
+            widget.refresh_display()
 
     async def _submit_by_content(self, content, class_name, title):
         expid = {
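Editor's note: the three TTL modes accepted by ttl_set_mode() above each translate into a fixed sequence of moninj injections. Summarised as data for reference (a restatement of the code shown, not new behaviour):

    # mode -> (TTLOverride channel, value) injections issued by ttl_set_mode()
    MODE_TO_INJECTIONS = {
        "0":   [("level", 0), ("oe", 1), ("en", 1)],   # override output low
        "1":   [("level", 1), ("oe", 1), ("en", 1)],   # override output high
        "exp": [("en", 0)],                            # release override to the experiment
    }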
@@ -571,17 +556,16 @@ class _DeviceManager:
             scheduling["flush"])
         logger.info("Submitted '%s', RID is %d", title, rid)
 
-    def _dds_faux_injection(self, dds_channel, action, title, log_msg):
-        handler = self.handlers_by_uid[dds_channel]
+    def _dds_faux_injection(self, dds_channel, dds_model, action, title, log_msg):
         # create kernel and fill it in and send-by-content
 
         # initialize CPLD (if applicable)
-        if handler.is_urukul:
+        if dds_model.is_urukul:
             # urukuls need CPLD init and switch to on
             cpld_dev = """self.setattr_device("core_cache")
-                self.setattr_device("{}")""".format(handler.cpld)
+                self.setattr_device("{}")""".format(dds_model.cpld)
 
             # `sta`/`rf_sw`` variables are guaranteed for urukuls
             # so {action} can use it
             # if there's no RF enabled, CPLD may have not been initialized
             # but if there is, it has been initialised - no need to do again
@@ -593,20 +577,20 @@ class _DeviceManager:
                     delay(15*ms)
                     self.{cpld}.init()
                     self.core_cache.put("_{cpld}_init", [1])
-            """.format(cpld=handler.cpld)
+            """.format(cpld=dds_model.cpld)
         else:
             cpld_dev = ""
             cpld_init = ""
 
         # AD9912/9910: init channel (if uninitialized)
-        if handler.dds_type == "AD9912":
+        if dds_model.dds_type == "AD9912":
             # 0xFF before init, 0x99 after
             channel_init = """
                 if self.{dds_channel}.read({cfgreg}, length=1) == 0xFF:
                     delay(10*ms)
                     self.{dds_channel}.init()
             """.format(dds_channel=dds_channel, cfgreg=AD9912_SER_CONF)
-        elif handler.dds_type == "AD9910":
+        elif dds_model.dds_type == "AD9910":
             # -1 before init, 2 after
             channel_init = """
                 if self.{dds_channel}.read32({cfgreg}) == -1:
@@ -640,27 +624,26 @@ class _DeviceManager:
                 channel_init=channel_init))
         asyncio.ensure_future(
             self._submit_by_content(
                 dds_exp,
                 title,
                 log_msg))
 
-    def dds_set_frequency(self, dds_channel, freq):
-        handler = self.handlers_by_uid[dds_channel]
+    def dds_set_frequency(self, dds_channel, dds_model, freq):
         action = "self.{ch}.set({freq})".format(
             freq=freq, ch=dds_channel)
-        if handler.is_urukul:
+        if dds_model.is_urukul:
             action += """
                 ch_no = self.{ch}.chip_select - 4
                 self.{cpld}.cfg_switches(rf_sw | 1 << ch_no)
-            """.format(ch=dds_channel, cpld=handler.cpld)
+            """.format(ch=dds_channel, cpld=dds_model.cpld)
         self._dds_faux_injection(
             dds_channel,
+            dds_model,
             action,
             "SetDDS",
-            "Set DDS {} {}MHz".format(dds_channel, freq / 1e6))
+            "Set DDS {} {}MHz".format(dds_channel, freq/1e6))
 
-    def dds_channel_toggle(self, dds_channel, sw=True):
-        handler = self.handlers_by_uid[dds_channel]
+    def dds_channel_toggle(self, dds_channel, dds_model, sw=True):
         # urukul only
         if sw:
             switch = "| 1 << ch_no"
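Editor's note: dds_set_frequency() above does not talk to the hardware directly; it assembles kernel source by string formatting and submits it as a throwaway experiment ("faux injection"). A runnable sketch of just that templating step, mirroring the format strings in the diff; the channel and CPLD names are hypothetical:

    dds_channel, cpld, freq = "urukul0_ch0", "urukul0_cpld", 100e6  # made-up names

    action = "self.{ch}.set({freq})".format(freq=freq, ch=dds_channel)
    # Urukul channels additionally drive the RF switch, as in the code above:
    action += """
                ch_no = self.{ch}.chip_select - 4
                self.{cpld}.cfg_switches(rf_sw | 1 << ch_no)
            """.format(ch=dds_channel, cpld=cpld)

    print(action)   # the text spliced into the generated "SetDDS" experiment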
@@ -671,13 +654,14 @@ class _DeviceManager:
             self.{cpld}.cfg_switches(rf_sw {switch})
         """.format(
             dds_channel=dds_channel,
-            cpld=handler.cpld,
+            cpld=dds_model.cpld,
             switch=switch
         )
         self._dds_faux_injection(
             dds_channel,
+            dds_model,
             action,
             "ToggleDDS",
             "Toggle DDS {} {}".format(dds_channel, "on" if sw else "off"))
 
     def setup_ttl_monitoring(self, enable, channel):
@@ -698,30 +682,30 @@ class _DeviceManager:
             self.mi_connection.monitor_probe(enable, spi_channel, channel)
 
     def monitor_cb(self, channel, probe, value):
-        if channel in self.ttl_handlers:
-            handler = self.ttl_handlers[channel]
+        if channel in self.ttl_widgets:
+            widget = self.ttl_widgets[channel]
             if probe == TTLProbe.level.value:
-                handler.cur_level = bool(value)
+                widget.cur_level = bool(value)
             elif probe == TTLProbe.oe.value:
-                handler.cur_oe = bool(value)
-            handler.refresh_display()
-        elif (channel, probe) in self.dds_handlers:
-            handler = self.dds_handlers[(channel, probe)]
-            handler.monitor_update(probe, value)
-            handler.refresh_display()
-        elif (channel, probe) in self.dac_handlers:
-            handler = self.dac_handlers[(channel, probe)]
-            handler.cur_value = value
-            handler.refresh_display()
+                widget.cur_oe = bool(value)
+            widget.refresh_display()
+        elif (channel, probe) in self.dds_widgets:
+            widget = self.dds_widgets[(channel, probe)]
+            widget.dds_model.monitor_update(probe, value)
+            widget.refresh_display()
+        elif (channel, probe) in self.dac_widgets:
+            widget = self.dac_widgets[(channel, probe)]
+            widget.cur_value = value
+            widget.refresh_display()
 
     def injection_status_cb(self, channel, override, value):
-        if channel in self.ttl_handlers:
-            handler = self.ttl_handlers[channel]
+        if channel in self.ttl_widgets:
+            widget = self.ttl_widgets[channel]
             if override == TTLOverride.en.value:
-                handler.cur_override = bool(value)
+                widget.cur_override = bool(value)
             if override == TTLOverride.level.value:
-                handler.cur_override_level = bool(value)
-            handler.refresh_display()
+                widget.cur_override_level = bool(value)
+            widget.refresh_display()
 
     def disconnect_cb(self):
         logger.error("lost connection to moninj")
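Editor's note: both branches of the hunk above route monitor updates the same way, so the rename does not change the dispatch rule: TTL monitors are keyed by RTIO channel alone, DDS and DAC monitors by the (channel, probe) pair. A condensed restatement:

    def route(channel, probe, ttl, dds, dac):
        # ttl/dds/dac are the lookup dicts maintained by _DeviceManager
        if channel in ttl:
            return ttl[channel]
        if (channel, probe) in dds:
            return dds[(channel, probe)]
        if (channel, probe) in dac:
            return dac[(channel, probe)]
        return None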
@@ -735,23 +719,25 @@ class _DeviceManager:
                 await self.mi_connection.close()
                 self.mi_connection = None
             new_mi_connection = CommMonInj(self.monitor_cb, self.injection_status_cb,
                                            self.disconnect_cb)
             try:
                 await new_mi_connection.connect(self.mi_addr, self.mi_port)
-            except Exception:
-                logger.error("failed to connect to moninj. Is aqctl_moninj_proxy running?",
-                             exc_info=True)
+            except asyncio.CancelledError:
+                logger.info("cancelled connection to moninj")
+                break
+            except:
+                logger.error("failed to connect to moninj. Is aqctl_moninj_proxy running?", exc_info=True)
                 await asyncio.sleep(10.)
                 self.reconnect_mi.set()
             else:
                 logger.info("ARTIQ dashboard connected to moninj (%s)",
                             self.mi_addr)
                 self.mi_connection = new_mi_connection
-                for ttl_channel in self.ttl_handlers.keys():
+                for ttl_channel in self.ttl_widgets.keys():
                     self.setup_ttl_monitoring(True, ttl_channel)
-                for bus_channel, channel in self.dds_handlers.keys():
+                for bus_channel, channel in self.dds_widgets.keys():
                     self.setup_dds_monitoring(True, bus_channel, channel)
-                for spi_channel, channel in self.dac_handlers.keys():
+                for spi_channel, channel in self.dac_widgets.keys():
                     self.setup_dac_monitoring(True, spi_channel, channel)
 
     async def close(self):
@@ -771,7 +757,7 @@ class _MonInjDock(QtWidgets.QDockWidget):
         self.setFeatures(QtWidgets.QDockWidget.DockWidgetMovable |
                          QtWidgets.QDockWidget.DockWidgetFloatable)
 
-    def layout_widgets(self, handlers):
+    def layout_widgets(self, widgets):
         scroll_area = QtWidgets.QScrollArea()
         self.setWidget(scroll_area)
 
@@ -779,8 +765,8 @@ class _MonInjDock(QtWidgets.QDockWidget):
         grid_widget = QtWidgets.QWidget()
         grid_widget.setLayout(grid)
 
-        for handler in sorted(handlers, key=lambda h: h.sort_key()):
-            grid.addWidget(handler.widget)
+        for widget in sorted(widgets, key=lambda w: w.sort_key()):
+            grid.addWidget(widget)
 
         scroll_area.setWidgetResizable(True)
         scroll_area.setWidget(grid_widget)
@@ -793,10 +779,19 @@ class MonInj:
         self.dac_dock = _MonInjDock("DAC")
 
         self.dm = _DeviceManager(schedule_ctl)
-        self.dm.ttl_cb = lambda: self.ttl_dock.layout_widgets(self.dm.ttl_handlers.values())
-        self.dm.dds_cb = lambda: self.dds_dock.layout_widgets(self.dm.dds_handlers.values())
-        self.dm.dac_cb = lambda: self.dac_dock.layout_widgets(self.dm.dac_handlers.values())
+        self.dm.ttl_cb = lambda: self.ttl_dock.layout_widgets(
+            self.dm.ttl_widgets.values())
+        self.dm.dds_cb = lambda: self.dds_dock.layout_widgets(
+            self.dm.dds_widgets.values())
+        self.dm.dac_cb = lambda: self.dac_dock.layout_widgets(
+            self.dm.dac_widgets.values())
+
+        self.subscriber = Subscriber("devices", self.dm.init_ddb, self.dm.notify)
+
+    async def start(self, server, port):
+        await self.subscriber.connect(server, port)
 
     async def stop(self):
+        await self.subscriber.close()
         if self.dm is not None:
             await self.dm.close()
@@ -15,8 +15,9 @@ logger = logging.getLogger(__name__)
 class Model(DictSyncModel):
     def __init__(self, init):
         DictSyncModel.__init__(self,
             ["RID", "Pipeline", "Status", "Prio", "Due date",
-             "Revision", "File", "Class name"], init)
+             "Revision", "File", "Class name"],
+            init)
 
     def sort_key(self, k, v):
         # order by priority, and then by due date and RID
@@ -95,14 +96,14 @@ class ScheduleDock(QtWidgets.QDockWidget):
 
         cw = QtGui.QFontMetrics(self.font()).averageCharWidth()
         h = self.table.horizontalHeader()
-        h.resizeSection(0, 7 * cw)
-        h.resizeSection(1, 12 * cw)
-        h.resizeSection(2, 16 * cw)
-        h.resizeSection(3, 6 * cw)
-        h.resizeSection(4, 16 * cw)
-        h.resizeSection(5, 30 * cw)
-        h.resizeSection(6, 20 * cw)
-        h.resizeSection(7, 20 * cw)
+        h.resizeSection(0, 7*cw)
+        h.resizeSection(1, 12*cw)
+        h.resizeSection(2, 16*cw)
+        h.resizeSection(3, 6*cw)
+        h.resizeSection(4, 16*cw)
+        h.resizeSection(5, 30*cw)
+        h.resizeSection(6, 20*cw)
+        h.resizeSection(7, 20*cw)
 
     def set_model(self, model):
         self.table_model = model
@@ -142,7 +143,7 @@ class ScheduleDock(QtWidgets.QDockWidget):
         selected_rid = self.table_model.row_to_key[row]
         pipeline = self.table_model.backing_store[selected_rid]["pipeline"]
         logger.info("Requesting termination of all "
                     "experiments in pipeline '%s'", pipeline)
 
         rids = set()
         for rid, info in self.table_model.backing_store.items():
@@ -150,6 +151,7 @@ class ScheduleDock(QtWidgets.QDockWidget):
                 rids.add(rid)
         asyncio.ensure_future(self.request_term_multiple(rids))
 
+
     def save_state(self):
         return bytes(self.table.horizontalHeader().saveState())
@@ -3,6 +3,8 @@ from functools import partial
 
 from PyQt5 import QtCore, QtWidgets
 
+from artiq.gui.tools import LayoutWidget
+
 logger = logging.getLogger(__name__)
 
@@ -33,7 +35,7 @@ class ShortcutsDock(QtWidgets.QDockWidget):
         for i in range(12):
             row = i + 1
 
-            layout.addWidget(QtWidgets.QLabel("F" + str(i + 1)), row, 0)
+            layout.addWidget(QtWidgets.QLabel("F" + str(i+1)), row, 0)
 
             label = QtWidgets.QLabel()
             label.setSizePolicy(QtWidgets.QSizePolicy.Ignored,
@@ -68,7 +70,7 @@ class ShortcutsDock(QtWidgets.QDockWidget):
                 "open": open,
                 "submit": submit
             }
-            shortcut = QtWidgets.QShortcut("F" + str(i + 1), main_window)
+            shortcut = QtWidgets.QShortcut("F" + str(i+1), main_window)
             shortcut.setContext(QtCore.Qt.ApplicationShortcut)
             shortcut.activated.connect(partial(self._activated, i))
 
@@ -1,914 +0,0 @@
-import os
-import asyncio
-import logging
-import bisect
-import itertools
-import math
-
-from PyQt5 import QtCore, QtWidgets, QtGui
-
-import pyqtgraph as pg
-import numpy as np
-
-from sipyco.pc_rpc import AsyncioClient
-from sipyco import pyon
-
-from artiq.tools import exc_to_warning, short_format
-from artiq.coredevice import comm_analyzer
-from artiq.coredevice.comm_analyzer import WaveformType
-from artiq.gui.tools import LayoutWidget, get_open_file_name, get_save_file_name
-from artiq.gui.models import DictSyncTreeSepModel
-from artiq.gui.dndwidgets import VDragScrollArea, VDragDropSplitter
-
-
-logger = logging.getLogger(__name__)
-
-WAVEFORM_MIN_HEIGHT = 50
-WAVEFORM_MAX_HEIGHT = 200
-
-
-class ProxyClient():
-    def __init__(self, receive_cb, timeout=5, timer=5, timer_backoff=1.1):
-        self.receive_cb = receive_cb
-        self.receiver = None
-        self.addr = None
-        self.port_proxy = None
-        self.port = None
-        self._reconnect_event = asyncio.Event()
-        self.timeout = timeout
-        self.timer = timer
-        self.timer_cur = timer
-        self.timer_backoff = timer_backoff
-        self._reconnect_task = asyncio.ensure_future(self._reconnect())
-
-    def update_address(self, addr, port, port_proxy):
-        self.addr = addr
-        self.port = port
-        self.port_proxy = port_proxy
-        self._reconnect_event.set()
-
-    async def trigger_proxy_task(self):
-        remote = AsyncioClient()
-        try:
-            try:
-                if self.addr is None:
-                    logger.error("missing core_analyzer host in device db")
-                    return
-                await remote.connect_rpc(self.addr, self.port, "coreanalyzer_proxy_control")
-            except:
-                logger.error("error connecting to analyzer proxy control", exc_info=True)
-                return
-            await remote.trigger()
-        except:
-            logger.error("analyzer proxy reported failure", exc_info=True)
-        finally:
-            remote.close_rpc()
-
-    async def _reconnect(self):
-        while True:
-            await self._reconnect_event.wait()
-            self._reconnect_event.clear()
-            if self.receiver is not None:
-                await self.receiver.close()
-                self.receiver = None
-            new_receiver = comm_analyzer.AnalyzerProxyReceiver(
-                self.receive_cb, self.disconnect_cb)
-            try:
-                if self.addr is not None:
-                    await asyncio.wait_for(new_receiver.connect(self.addr, self.port_proxy),
-                                           self.timeout)
-                    logger.info("ARTIQ dashboard connected to analyzer proxy (%s)", self.addr)
-                    self.timer_cur = self.timer
-                    self.receiver = new_receiver
-                    continue
-            except Exception:
-                logger.error("error connecting to analyzer proxy", exc_info=True)
-            try:
-                await asyncio.wait_for(self._reconnect_event.wait(), self.timer_cur)
-            except asyncio.TimeoutError:
-                self.timer_cur *= self.timer_backoff
-                self._reconnect_event.set()
-            else:
-                self.timer_cur = self.timer
-
-    async def close(self):
-        self._reconnect_task.cancel()
-        try:
-            await asyncio.wait_for(self._reconnect_task, None)
-        except asyncio.CancelledError:
-            pass
-        if self.receiver is not None:
-            await self.receiver.close()
-
-    def disconnect_cb(self):
-        logger.error("lost connection to analyzer proxy")
-        self._reconnect_event.set()
-
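Editor's note: ProxyClient._reconnect() above retries with a growing delay: the wait starts at `timer` seconds, is multiplied by `timer_backoff` after every timeout, and resets on success or when the address changes. A tiny standalone sketch of just that timing rule (the sleep stands in for the real wait_for() on the reconnect event):

    import asyncio

    async def backoff_waits(timer=5, timer_backoff=1.1, attempts=4):
        cur = timer
        for _ in range(attempts):
            print("waiting", round(cur, 2), "s before retrying")
            await asyncio.sleep(0)     # stand-in for the real timed wait
            cur *= timer_backoff       # same growth rule as the dashboard code

    asyncio.run(backoff_waits())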
|
|
||||||
|
|
||||||
class _BackgroundItem(pg.GraphicsWidgetAnchor, pg.GraphicsWidget):
|
|
||||||
def __init__(self, parent, rect):
|
|
||||||
pg.GraphicsWidget.__init__(self, parent)
|
|
||||||
pg.GraphicsWidgetAnchor.__init__(self)
|
|
||||||
self.item = QtWidgets.QGraphicsRectItem(rect, self)
|
|
||||||
brush = QtGui.QBrush(QtGui.QColor(10, 10, 10, 140))
|
|
||||||
self.item.setBrush(brush)
|
|
||||||
|
|
||||||
|
|
||||||
class _BaseWaveform(pg.PlotWidget):
|
|
||||||
cursorMove = QtCore.pyqtSignal(float)
|
|
||||||
|
|
||||||
def __init__(self, name, width, precision, unit,
|
|
||||||
parent=None, pen="r", stepMode="right", connect="finite"):
|
|
||||||
pg.PlotWidget.__init__(self,
|
|
||||||
parent=parent,
|
|
||||||
x=None,
|
|
||||||
y=None,
|
|
||||||
pen=pen,
|
|
||||||
stepMode=stepMode,
|
|
||||||
connect=connect)
|
|
||||||
|
|
||||||
self.setMinimumHeight(WAVEFORM_MIN_HEIGHT)
|
|
||||||
self.setMaximumHeight(WAVEFORM_MAX_HEIGHT)
|
|
||||||
self.setMenuEnabled(False)
|
|
||||||
self.setContextMenuPolicy(QtCore.Qt.ActionsContextMenu)
|
|
||||||
|
|
||||||
self.name = name
|
|
||||||
self.width = width
|
|
||||||
self.precision = precision
|
|
||||||
self.unit = unit
|
|
||||||
|
|
||||||
self.x_data = []
|
|
||||||
self.y_data = []
|
|
||||||
|
|
||||||
self.plot_item = self.getPlotItem()
|
|
||||||
self.plot_item.hideButtons()
|
|
||||||
self.plot_item.hideAxis("top")
|
|
||||||
self.plot_item.getAxis("bottom").setStyle(showValues=False, tickLength=0)
|
|
||||||
self.plot_item.getAxis("left").setStyle(showValues=False, tickLength=0)
|
|
||||||
self.plot_item.setRange(yRange=(0, 1), padding=0.1)
|
|
||||||
self.plot_item.showGrid(x=True, y=True)
|
|
||||||
|
|
||||||
self.plot_data_item = self.plot_item.listDataItems()[0]
|
|
||||||
self.plot_data_item.setClipToView(True)
|
|
||||||
|
|
||||||
self.view_box = self.plot_item.getViewBox()
|
|
||||||
self.view_box.setMouseEnabled(x=True, y=False)
|
|
||||||
self.view_box.disableAutoRange(axis=pg.ViewBox.YAxis)
|
|
||||||
self.view_box.setLimits(xMin=0, minXRange=20)
|
|
||||||
|
|
||||||
self.title_label = pg.LabelItem(self.name, parent=self.plot_item)
|
|
||||||
self.title_label.anchor(itemPos=(0, 0), parentPos=(0, 0), offset=(0, 0))
|
|
||||||
self.title_label.setAttr('justify', 'left')
|
|
||||||
self.title_label.setZValue(10)
|
|
||||||
|
|
||||||
rect = self.title_label.boundingRect()
|
|
||||||
rect.setHeight(rect.height() * 2)
|
|
||||||
rect.setWidth(225)
|
|
||||||
self.label_bg = _BackgroundItem(parent=self.plot_item, rect=rect)
|
|
||||||
self.label_bg.anchor(itemPos=(0, 0), parentPos=(0, 0), offset=(0, 0))
|
|
||||||
|
|
||||||
self.cursor = pg.InfiniteLine()
|
|
||||||
self.cursor_y = None
|
|
||||||
self.addItem(self.cursor)
|
|
||||||
|
|
||||||
self.cursor_label = pg.LabelItem('', parent=self.plot_item)
|
|
||||||
self.cursor_label.anchor(itemPos=(0, 0), parentPos=(0, 0), offset=(0, 20))
|
|
||||||
self.cursor_label.setAttr('justify', 'left')
|
|
||||||
self.cursor_label.setZValue(10)
|
|
||||||
|
|
||||||
def setStoppedX(self, stopped_x):
|
|
||||||
self.stopped_x = stopped_x
|
|
||||||
self.view_box.setLimits(xMax=stopped_x)
|
|
||||||
|
|
||||||
def setData(self, data):
|
|
||||||
if len(data) == 0:
|
|
||||||
self.x_data, self.y_data = [], []
|
|
||||||
else:
|
|
||||||
self.x_data, self.y_data = zip(*data)
|
|
||||||
|
|
||||||
def onDataChange(self, data):
|
|
||||||
raise NotImplementedError
|
|
||||||
|
|
||||||
def onCursorMove(self, x):
|
|
||||||
self.cursor.setValue(x)
|
|
||||||
if len(self.x_data) < 1:
|
|
||||||
return
|
|
||||||
ind = bisect.bisect_left(self.x_data, x) - 1
|
|
||||||
dr = self.plot_data_item.dataRect()
|
|
||||||
self.cursor_y = None
|
|
||||||
if dr is not None and 0 <= ind < len(self.y_data):
|
|
||||||
self.cursor_y = self.y_data[ind]
|
|
||||||
|
|
||||||
def mouseMoveEvent(self, e):
|
|
||||||
if e.buttons() == QtCore.Qt.LeftButton \
|
|
||||||
and e.modifiers() == QtCore.Qt.ShiftModifier:
|
|
||||||
drag = QtGui.QDrag(self)
|
|
||||||
mime = QtCore.QMimeData()
|
|
||||||
drag.setMimeData(mime)
|
|
||||||
pixmapi = QtWidgets.QApplication.style().standardIcon(
|
|
||||||
QtWidgets.QStyle.SP_FileIcon)
|
|
||||||
drag.setPixmap(pixmapi.pixmap(32))
|
|
||||||
drag.exec_(QtCore.Qt.MoveAction)
|
|
||||||
else:
|
|
||||||
super().mouseMoveEvent(e)
|
|
||||||
|
|
||||||
def wheelEvent(self, e):
|
|
||||||
if e.modifiers() & QtCore.Qt.ControlModifier:
|
|
||||||
super().wheelEvent(e)
|
|
||||||
|
|
||||||
def mouseDoubleClickEvent(self, e):
|
|
||||||
pos = self.view_box.mapSceneToView(e.pos())
|
|
||||||
self.cursorMove.emit(pos.x())
|
|
||||||
|
|
||||||
|
|
||||||
class BitWaveform(_BaseWaveform):
|
|
||||||
def __init__(self, name, width, precision, unit, parent=None):
|
|
||||||
_BaseWaveform.__init__(self, name, width, precision, unit, parent)
|
|
||||||
self.plot_item.showGrid(x=True, y=False)
|
|
||||||
self._arrows = []
|
|
||||||
|
|
||||||
def onDataChange(self, data):
|
|
||||||
try:
|
|
||||||
self.setData(data)
|
|
||||||
for arw in self._arrows:
|
|
||||||
self.removeItem(arw)
|
|
||||||
self._arrows = []
|
|
||||||
l = len(data)
|
|
||||||
display_y = np.empty(l)
|
|
||||||
display_x = np.empty(l)
|
|
||||||
display_map = {
|
|
||||||
"X": 0.5,
|
|
||||||
"1": 1,
|
|
||||||
"0": 0
|
|
||||||
}
|
|
||||||
previous_y = None
|
|
||||||
for i, coord in enumerate(data):
|
|
||||||
x, y = coord
|
|
||||||
dis_y = display_map[y]
|
|
||||||
if previous_y == y:
|
|
||||||
arw = pg.ArrowItem(pxMode=True, angle=90)
|
|
||||||
self.addItem(arw)
|
|
||||||
self._arrows.append(arw)
|
|
||||||
arw.setPos(x, dis_y)
|
|
||||||
display_y[i] = dis_y
|
|
||||||
display_x[i] = x
|
|
||||||
previous_y = y
|
|
||||||
self.plot_data_item.setData(x=display_x, y=display_y)
|
|
||||||
except:
|
|
||||||
logger.error("Error when displaying waveform: %s", self.name, exc_info=True)
|
|
||||||
for arw in self._arrows:
|
|
||||||
self.removeItem(arw)
|
|
||||||
self.plot_data_item.setData(x=[], y=[])
|
|
||||||
|
|
||||||
def onCursorMove(self, x):
|
|
||||||
_BaseWaveform.onCursorMove(self, x)
|
|
||||||
if self.cursor_y is not None:
|
|
||||||
self.cursor_label.setText(self.cursor_y)
|
|
||||||
else:
|
|
||||||
self.cursor_label.setText("")
|
|
||||||
|
|
||||||
|
|
||||||
class AnalogWaveform(_BaseWaveform):
    def __init__(self, name, width, precision, unit, parent=None):
        _BaseWaveform.__init__(self, name, width, precision, unit, parent)

    def onDataChange(self, data):
        try:
            self.setData(data)
            self.plot_data_item.setData(x=self.x_data, y=self.y_data)
            if len(data) > 0:
                max_y = max(self.y_data)
                min_y = min(self.y_data)
                self.plot_item.setRange(yRange=(min_y, max_y), padding=0.1)
        except:
            logger.error("Error when displaying waveform: %s", self.name, exc_info=True)
            self.plot_data_item.setData(x=[], y=[])

    def onCursorMove(self, x):
        _BaseWaveform.onCursorMove(self, x)
        if self.cursor_y is not None:
            t = short_format(self.cursor_y, {"precision": self.precision, "unit": self.unit})
        else:
            t = ""
        self.cursor_label.setText(t)

class BitVectorWaveform(_BaseWaveform):
    def __init__(self, name, width, precision, unit, parent=None):
        _BaseWaveform.__init__(self, name, width, precision, parent)
        self._labels = []
        self._format_string = "{:0=" + str(math.ceil(width / 4)) + "X}"
        self.view_box.sigTransformChanged.connect(self._update_labels)
        self.plot_item.showGrid(x=True, y=False)

    def _update_labels(self):
        for label in self._labels:
            self.removeItem(label)
        xmin, xmax = self.view_box.viewRange()[0]
        left_label_i = bisect.bisect_left(self.x_data, xmin)
        right_label_i = bisect.bisect_right(self.x_data, xmax) + 1
        for i, j in itertools.pairwise(range(left_label_i, right_label_i)):
            x1 = self.x_data[i]
            x2 = self.x_data[j] if j < len(self.x_data) else self.stopped_x
            lbl = self._labels[i]
            bounds = lbl.boundingRect()
            bounds_view = self.view_box.mapSceneToView(bounds)
            if bounds_view.boundingRect().width() < x2 - x1:
                self.addItem(lbl)

    def onDataChange(self, data):
        try:
            self.setData(data)
            for lbl in self._labels:
                self.plot_item.removeItem(lbl)
            self._labels = []
            l = len(data)
            display_x = np.empty(l * 2)
            display_y = np.empty(l * 2)
            for i, coord in enumerate(data):
                x, y = coord
                display_x[i * 2] = x
                display_x[i * 2 + 1] = x
                display_y[i * 2] = 0
                display_y[i * 2 + 1] = int(int(y) != 0)
                lbl = pg.TextItem(
                    self._format_string.format(int(y, 2)), anchor=(0, 0.5))
                lbl.setPos(x, 0.5)
                lbl.setTextWidth(100)
                self._labels.append(lbl)
            self.plot_data_item.setData(x=display_x, y=display_y)
        except:
            logger.error("Error when displaying waveform: %s", self.name, exc_info=True)
            for lbl in self._labels:
                self.plot_item.removeItem(lbl)
            self.plot_data_item.setData(x=[], y=[])

    def onCursorMove(self, x):
        _BaseWaveform.onCursorMove(self, x)
        if self.cursor_y is not None:
            t = self._format_string.format(int(self.cursor_y, 2))
        else:
            t = ""
        self.cursor_label.setText(t)

class LogWaveform(_BaseWaveform):
    def __init__(self, name, width, precision, unit, parent=None):
        _BaseWaveform.__init__(self, name, width, precision, parent)
        self.plot_data_item.opts['pen'] = None
        self.plot_data_item.opts['symbol'] = 'x'
        self._labels = []
        self.plot_item.showGrid(x=True, y=False)

    def onDataChange(self, data):
        try:
            self.setData(data)
            for lbl in self._labels:
                self.plot_item.removeItem(lbl)
            self._labels = []
            self.plot_data_item.setData(
                x=self.x_data, y=np.ones(len(self.x_data)))
            if len(data) == 0:
                return
            old_x = data[0][0]
            old_msg = data[0][1]
            for x, msg in data[1:]:
                if x == old_x:
                    old_msg += "\n" + msg
                else:
                    lbl = pg.TextItem(old_msg)
                    self.addItem(lbl)
                    self._labels.append(lbl)
                    lbl.setPos(old_x, 1)
                    old_msg = msg
                    old_x = x
            lbl = pg.TextItem(old_msg)
            self.addItem(lbl)
            self._labels.append(lbl)
            lbl.setPos(old_x, 1)
        except:
            logger.error("Error when displaying waveform: %s", self.name, exc_info=True)
            for lbl in self._labels:
                self.plot_item.removeItem(lbl)
            self.plot_data_item.setData(x=[], y=[])

class _WaveformView(QtWidgets.QWidget):
    cursorMove = QtCore.pyqtSignal(float)

    def __init__(self, parent):
        QtWidgets.QWidget.__init__(self, parent=parent)

        self._stopped_x = None
        self._timescale = 1
        self._cursor_x = 0

        layout = QtWidgets.QVBoxLayout()
        layout.setContentsMargins(0, 0, 0, 0)
        layout.setSpacing(0)
        self.setLayout(layout)

        self._ref_axis = pg.PlotWidget()
        self._ref_axis.hideAxis("bottom")
        self._ref_axis.hideAxis("left")
        self._ref_axis.hideButtons()
        self._ref_axis.setFixedHeight(45)
        self._ref_axis.setMenuEnabled(False)
        self._top = pg.AxisItem("top")
        self._top.setScale(1e-12)
        self._top.setLabel(units="s")
        self._ref_axis.setAxisItems({"top": self._top})
        layout.addWidget(self._ref_axis)

        self._ref_vb = self._ref_axis.getPlotItem().getViewBox()
        self._ref_vb.setFixedHeight(0)
        self._ref_vb.setMouseEnabled(x=True, y=False)
        self._ref_vb.setLimits(xMin=0)

        scroll_area = VDragScrollArea(self)
        scroll_area.setWidgetResizable(True)
        scroll_area.setContentsMargins(0, 0, 0, 0)
        scroll_area.setFrameShape(QtWidgets.QFrame.NoFrame)
        scroll_area.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
        layout.addWidget(scroll_area)

        self._splitter = VDragDropSplitter(parent=scroll_area)
        self._splitter.setHandleWidth(1)
        scroll_area.setWidget(self._splitter)

        self.cursorMove.connect(self.onCursorMove)

        self.confirm_delete_dialog = QtWidgets.QMessageBox(self)
        self.confirm_delete_dialog.setIcon(
            QtWidgets.QMessageBox.Icon.Warning
        )
        self.confirm_delete_dialog.setText("Delete all waveforms?")
        self.confirm_delete_dialog.setStandardButtons(
            QtWidgets.QMessageBox.Ok | QtWidgets.QMessageBox.Cancel
        )
        self.confirm_delete_dialog.setDefaultButton(
            QtWidgets.QMessageBox.Ok
        )

    def setModel(self, model):
        self._model = model
        self._model.dataChanged.connect(self.onDataChange)
        self._model.rowsInserted.connect(self.onInsert)
        self._model.rowsRemoved.connect(self.onRemove)
        self._model.rowsMoved.connect(self.onMove)
        self._splitter.dropped.connect(self._model.move)
        self.confirm_delete_dialog.accepted.connect(self._model.clear)

    def setTimescale(self, timescale):
        self._timescale = timescale
        self._top.setScale(1e-12 * timescale)

    def setStoppedX(self, stopped_x):
        self._stopped_x = stopped_x
        self._ref_vb.setLimits(xMax=stopped_x)
        self._ref_vb.setRange(xRange=(0, stopped_x))
        for i in range(self._model.rowCount()):
            self._splitter.widget(i).setStoppedX(stopped_x)

    def resetZoom(self):
        if self._stopped_x is not None:
            self._ref_vb.setRange(xRange=(0, self._stopped_x))

    def onDataChange(self, top, bottom, roles):
        self.cursorMove.emit(0)
        first = top.row()
        last = bottom.row()
        data_row = self._model.headers.index("data")
        for i in range(first, last + 1):
            data = self._model.data(self._model.index(i, data_row))
            self._splitter.widget(i).onDataChange(data)

    def onInsert(self, parent, first, last):
        for i in range(first, last + 1):
            w = self._create_waveform(i)
            self._splitter.insertWidget(i, w)
        self._resize()

    def onRemove(self, parent, first, last):
        for i in reversed(range(first, last + 1)):
            w = self._splitter.widget(i)
            w.deleteLater()
        self._splitter.refresh()
        self._resize()

    def onMove(self, src_parent, src_start, src_end, dest_parent, dest_row):
        w = self._splitter.widget(src_start)
        self._splitter.insertWidget(dest_row, w)

    def onCursorMove(self, x):
        self._cursor_x = x
        for i in range(self._model.rowCount()):
            self._splitter.widget(i).onCursorMove(x)

    def _create_waveform(self, row):
        name, ty, width, precision, unit = (
            self._model.data(self._model.index(row, i)) for i in range(5))
        waveform_cls = {
            WaveformType.BIT: BitWaveform,
            WaveformType.VECTOR: BitVectorWaveform,
            WaveformType.ANALOG: AnalogWaveform,
            WaveformType.LOG: LogWaveform
        }[ty]
        w = waveform_cls(name, width, precision, unit, parent=self._splitter)
        w.setXLink(self._ref_vb)
        w.setStoppedX(self._stopped_x)
        w.cursorMove.connect(self.cursorMove)
        w.onCursorMove(self._cursor_x)
        action = QtWidgets.QAction("Delete waveform", w)
        action.triggered.connect(lambda: self._delete_waveform(w))
        w.addAction(action)
        action = QtWidgets.QAction("Delete all waveforms", w)
        action.triggered.connect(self.confirm_delete_dialog.open)
        w.addAction(action)
        return w

    def _delete_waveform(self, waveform):
        row = self._splitter.indexOf(waveform)
        self._model.pop(row)

    def _resize(self):
        self._splitter.setFixedHeight(
            int((WAVEFORM_MIN_HEIGHT + WAVEFORM_MAX_HEIGHT) * self._model.rowCount() / 2))

class _WaveformModel(QtCore.QAbstractTableModel):
    def __init__(self):
        self.backing_struct = []
        self.headers = ["name", "type", "width", "precision", "unit", "data"]
        QtCore.QAbstractTableModel.__init__(self)

    def rowCount(self, parent=QtCore.QModelIndex()):
        return len(self.backing_struct)

    def columnCount(self, parent=QtCore.QModelIndex()):
        return len(self.headers)

    def data(self, index, role=QtCore.Qt.DisplayRole):
        if index.isValid():
            return self.backing_struct[index.row()][index.column()]
        return None

    def extend(self, data):
        length = len(self.backing_struct)
        len_data = len(data)
        self.beginInsertRows(QtCore.QModelIndex(), length, length + len_data - 1)
        self.backing_struct.extend(data)
        self.endInsertRows()

    def pop(self, row):
        self.beginRemoveRows(QtCore.QModelIndex(), row, row)
        self.backing_struct.pop(row)
        self.endRemoveRows()

    def move(self, src, dest):
        if src == dest:
            return
        if src < dest:
            dest, src = src, dest
        self.beginMoveRows(QtCore.QModelIndex(), src, src, QtCore.QModelIndex(), dest)
        self.backing_struct.insert(dest, self.backing_struct.pop(src))
        self.endMoveRows()

    def clear(self):
        self.beginRemoveRows(QtCore.QModelIndex(), 0, len(self.backing_struct) - 1)
        self.backing_struct.clear()
        self.endRemoveRows()

    def export_list(self):
        return [[row[0], row[1].value, *row[2:5]] for row in self.backing_struct]

    def import_list(self, channel_list):
        self.clear()
        data = [[row[0], WaveformType(row[1]), *row[2:5], []] for row in channel_list]
        self.extend(data)

    def update_data(self, waveform_data, top, bottom):
        name_col = self.headers.index("name")
        data_col = self.headers.index("data")
        for i in range(top, bottom):
            name = self.data(self.index(i, name_col))
            self.backing_struct[i][data_col] = waveform_data.get(name, [])
            self.dataChanged.emit(self.index(i, data_col),
                                  self.index(i, data_col))

    def update_all(self, waveform_data):
        self.update_data(waveform_data, 0, self.rowCount())

class _CursorTimeControl(QtWidgets.QLineEdit):
    submit = QtCore.pyqtSignal(float)

    def __init__(self, parent):
        QtWidgets.QLineEdit.__init__(self, parent=parent)
        self._text = ""
        self._value = 0
        self._timescale = 1
        self.setDisplayValue(0)
        self.textChanged.connect(self._onTextChange)
        self.returnPressed.connect(self._onReturnPress)

    def setTimescale(self, timescale):
        self._timescale = timescale

    def _onTextChange(self, text):
        self._text = text

    def setDisplayValue(self, value):
        self._value = value
        self._text = pg.siFormat(value * 1e-12 * self._timescale,
                                 suffix="s",
                                 allowUnicode=False,
                                 precision=15)
        self.setText(self._text)

    def _setValueFromText(self, text):
        try:
            self._value = pg.siEval(text) * (1e12 / self._timescale)
        except:
            logger.error("Error when parsing cursor time input", exc_info=True)

    def _onReturnPress(self):
        self._setValueFromText(self._text)
        self.setDisplayValue(self._value)
        self.submit.emit(self._value)
        self.clearFocus()

class Model(DictSyncTreeSepModel):
    def __init__(self, init):
        DictSyncTreeSepModel.__init__(self, "/", ["Channels"], init)

    def clear(self):
        for k in self.backing_store:
            self._del_item(self, k.split(self.separator))
        self.backing_store.clear()

    def update(self, d):
        for k, v in d.items():
            self[k] = v

class _AddChannelDialog(QtWidgets.QDialog):
    def __init__(self, parent, model):
        QtWidgets.QDialog.__init__(self, parent=parent)
        self.setContextMenuPolicy(QtCore.Qt.ActionsContextMenu)
        self.setWindowTitle("Add channels")

        layout = QtWidgets.QVBoxLayout()
        self.setLayout(layout)

        self._model = model
        self._tree_view = QtWidgets.QTreeView()
        self._tree_view.setHeaderHidden(True)
        self._tree_view.setSelectionBehavior(
            QtWidgets.QAbstractItemView.SelectItems)
        self._tree_view.setSelectionMode(
            QtWidgets.QAbstractItemView.ExtendedSelection)
        self._tree_view.setModel(self._model)
        layout.addWidget(self._tree_view)

        self._button_box = QtWidgets.QDialogButtonBox(
            QtWidgets.QDialogButtonBox.Ok | QtWidgets.QDialogButtonBox.Cancel
        )
        self._button_box.setCenterButtons(True)
        self._button_box.accepted.connect(self.add_channels)
        self._button_box.rejected.connect(self.reject)
        layout.addWidget(self._button_box)

    def add_channels(self):
        selection = self._tree_view.selectedIndexes()
        channels = []
        for select in selection:
            key = self._model.index_to_key(select)
            if key is not None:
                channels.append([key, *self._model[key].ref, []])
        self.channels = channels
        self.accept()

class WaveformDock(QtWidgets.QDockWidget):
    def __init__(self, timeout, timer, timer_backoff):
        QtWidgets.QDockWidget.__init__(self, "Waveform")
        self.setObjectName("Waveform")
        self.setFeatures(
            QtWidgets.QDockWidget.DockWidgetMovable | QtWidgets.QDockWidget.DockWidgetFloatable)

        self._channel_model = Model({})
        self._waveform_model = _WaveformModel()

        self._ddb = None
        self._dump = None

        self._waveform_data = {
            "timescale": 1,
            "stopped_x": None,
            "logs": dict(),
            "data": dict(),
        }

        self._current_dir = os.getcwd()

        self.proxy_client = ProxyClient(self.on_dump_receive,
                                        timeout,
                                        timer,
                                        timer_backoff)

        grid = LayoutWidget()
        self.setWidget(grid)

        self._menu_btn = QtWidgets.QPushButton()
        self._menu_btn.setIcon(
            QtWidgets.QApplication.style().standardIcon(
                QtWidgets.QStyle.SP_FileDialogStart))
        grid.addWidget(self._menu_btn, 0, 0)

        self._request_dump_btn = QtWidgets.QToolButton()
        self._request_dump_btn.setToolTip("Fetch analyzer data from device")
        self._request_dump_btn.setIcon(
            QtWidgets.QApplication.style().standardIcon(
                QtWidgets.QStyle.SP_BrowserReload))
        self._request_dump_btn.clicked.connect(
            lambda: asyncio.ensure_future(exc_to_warning(self.proxy_client.trigger_proxy_task())))
        grid.addWidget(self._request_dump_btn, 0, 1)

        self._add_channel_dialog = _AddChannelDialog(self, self._channel_model)
        self._add_channel_dialog.accepted.connect(self._add_channels)

        self._add_btn = QtWidgets.QToolButton()
        self._add_btn.setToolTip("Add channels...")
        self._add_btn.setIcon(
            QtWidgets.QApplication.style().standardIcon(
                QtWidgets.QStyle.SP_FileDialogListView))
        self._add_btn.clicked.connect(self._add_channel_dialog.open)
        grid.addWidget(self._add_btn, 0, 2)

        self._file_menu = QtWidgets.QMenu()
        self._add_async_action("Open trace...", self.load_trace)
        self._add_async_action("Save trace...", self.save_trace)
        self._add_async_action("Save trace as VCD...", self.save_vcd)
        self._add_async_action("Open channel list...", self.load_channels)
        self._add_async_action("Save channel list...", self.save_channels)
        self._menu_btn.setMenu(self._file_menu)

        self._waveform_view = _WaveformView(self)
        self._waveform_view.setModel(self._waveform_model)
        grid.addWidget(self._waveform_view, 1, 0, colspan=12)

        self._reset_zoom_btn = QtWidgets.QToolButton()
        self._reset_zoom_btn.setToolTip("Reset zoom")
        self._reset_zoom_btn.setIcon(
            QtWidgets.QApplication.style().standardIcon(
                QtWidgets.QStyle.SP_TitleBarMaxButton))
        self._reset_zoom_btn.clicked.connect(self._waveform_view.resetZoom)
        grid.addWidget(self._reset_zoom_btn, 0, 3)

        self._cursor_control = _CursorTimeControl(self)
        self._waveform_view.cursorMove.connect(self._cursor_control.setDisplayValue)
        self._cursor_control.submit.connect(self._waveform_view.onCursorMove)
        grid.addWidget(self._cursor_control, 0, 4, colspan=6)

    def _add_async_action(self, label, coro):
        action = QtWidgets.QAction(label, self)
        action.triggered.connect(
            lambda: asyncio.ensure_future(exc_to_warning(coro())))
        self._file_menu.addAction(action)

    def _add_channels(self):
        channels = self._add_channel_dialog.channels
        count = self._waveform_model.rowCount()
        self._waveform_model.extend(channels)
        self._waveform_model.update_data(self._waveform_data['data'],
                                         count,
                                         count + len(channels))

    def on_dump_receive(self, dump):
        self._dump = dump
        decoded_dump = comm_analyzer.decode_dump(dump)
        waveform_data = comm_analyzer.decoded_dump_to_waveform_data(self._ddb, decoded_dump)
        self._waveform_data.update(waveform_data)
        self._channel_model.update(self._waveform_data['logs'])
        self._waveform_model.update_all(self._waveform_data['data'])
        self._waveform_view.setStoppedX(self._waveform_data['stopped_x'])
        self._waveform_view.setTimescale(self._waveform_data['timescale'])
        self._cursor_control.setTimescale(self._waveform_data['timescale'])

    async def load_trace(self):
        try:
            filename = await get_open_file_name(
                self,
                "Load Analyzer Trace",
                self._current_dir,
                "All files (*.*)")
        except asyncio.CancelledError:
            return
        self._current_dir = os.path.dirname(filename)
        try:
            with open(filename, 'rb') as f:
                dump = f.read()
            self.on_dump_receive(dump)
        except:
            logger.error("Failed to open analyzer trace", exc_info=True)

    async def save_trace(self):
        if self._dump is None:
            logger.error("No analyzer trace stored in dashboard, "
                         "try loading from file or fetching from device")
            return
        try:
            filename = await get_save_file_name(
                self,
                "Save Analyzer Trace",
                self._current_dir,
                "All files (*.*)")
        except asyncio.CancelledError:
            return
        self._current_dir = os.path.dirname(filename)
        try:
            with open(filename, 'wb') as f:
                f.write(self._dump)
        except:
            logger.error("Failed to save analyzer trace", exc_info=True)

    async def save_vcd(self):
        if self._dump is None:
            logger.error("No analyzer trace stored in dashboard, "
                         "try loading from file or fetching from device")
            return
        try:
            filename = await get_save_file_name(
                self,
                "Save VCD",
                self._current_dir,
                "All files (*.*)")
        except asyncio.CancelledError:
            return
        self._current_dir = os.path.dirname(filename)
        try:
            decoded_dump = comm_analyzer.decode_dump(self._dump)
            with open(filename, 'w') as f:
                comm_analyzer.decoded_dump_to_vcd(f, self._ddb, decoded_dump)
        except:
            logger.error("Failed to save trace as VCD", exc_info=True)

    async def load_channels(self):
        try:
            filename = await get_open_file_name(
                self,
                "Open channel list",
                self._current_dir,
                "PYON files (*.pyon);;All files (*.*)")
        except asyncio.CancelledError:
            return
        self._current_dir = os.path.dirname(filename)
        try:
            channel_list = pyon.load_file(filename)
            self._waveform_model.import_list(channel_list)
            self._waveform_model.update_all(self._waveform_data['data'])
        except:
            logger.error("Failed to open channel list", exc_info=True)

    async def save_channels(self):
        try:
            filename = await get_save_file_name(
                self,
                "Save channel list",
                self._current_dir,
                "PYON files (*.pyon);;All files (*.*)")
        except asyncio.CancelledError:
            return
        self._current_dir = os.path.dirname(filename)
        try:
            channel_list = self._waveform_model.export_list()
            pyon.store_file(filename, channel_list)
        except:
            logger.error("Failed to save channel list", exc_info=True)

    def _process_ddb(self):
        channel_list = comm_analyzer.get_channel_list(self._ddb)
        self._channel_model.clear()
        self._channel_model.update(channel_list)
        desc = self._ddb.get("core_analyzer")
        if desc is not None:
            addr = desc["host"]
            port_proxy = desc.get("port_proxy", 1385)
            port = desc.get("port", 1386)
            self.proxy_client.update_address(addr, port, port_proxy)
        else:
            self.proxy_client.update_address(None, None, None)

    def init_ddb(self, ddb):
        self._ddb = ddb
        self._process_ddb()
        return ddb

    def notify_ddb(self, mod):
        self._process_ddb()

    async def stop(self):
        if self.proxy_client is not None:
            await self.proxy_client.close()

@@ -127,7 +127,7 @@
 "# let's connect to the master\n",
 "\n",
 "schedule, exps, datasets = [\n",
-"    Client(\"::1\", 3251, i) for i in\n",
+"    Client(\"::1\", 3251, \"master_\" + i) for i in\n",
 "    \"schedule experiment_db dataset_db\".split()]\n",
 "\n",
 "print(\"current schedule\")\n",

@@ -1,34 +0,0 @@
-from artiq.experiment import *
-
-
-class InteractiveDemo(EnvExperiment):
-    def build(self):
-        pass
-
-    def run(self):
-        print("Waiting for user input...")
-        with self.interactive(title="Interactive Demo") as interactive:
-            interactive.setattr_argument("pyon_value",
-                                         PYONValue(self.get_dataset("foo", default=42)))
-            interactive.setattr_argument("number", NumberValue(42e-6,
-                                                               unit="us",
-                                                               precision=4))
-            interactive.setattr_argument("integer", NumberValue(42,
-                                                                step=1, precision=0))
-            interactive.setattr_argument("string", StringValue("Hello World"))
-            interactive.setattr_argument("scan", Scannable(global_max=400,
-                                                           default=NoScan(325),
-                                                           precision=6))
-            interactive.setattr_argument("boolean", BooleanValue(True), "Group")
-            interactive.setattr_argument("enum",
-                                         EnumerationValue(["foo", "bar", "quux"], "foo"),
-                                         "Group")
-        print("Done! Values:")
-        print(interactive.pyon_value)
-        print(interactive.boolean)
-        print(interactive.enum)
-        print(interactive.number, type(interactive.number))
-        print(interactive.integer, type(interactive.integer))
-        print(interactive.string)
-        for i in interactive.scan:
-            print(i)

@@ -500,7 +500,7 @@ pub extern fn main() -> i32 {
    println!(r"|_| |_|_|____/ \___/ \____|");
    println!("");
    println!("MiSoC Bootloader");
-   println!("Copyright (c) 2017-2024 M-Labs Limited");
+   println!("Copyright (c) 2017-2023 M-Labs Limited");
    println!("");

    #[cfg(has_ethmac)]

@@ -453,42 +453,15 @@ extern fn dma_playback(timestamp: i64, ptr: i32, _uses_ddma: bool) {
    }
}

-#[cfg(all(not(kernel_has_rtio_dma), not(has_rtio_dma)))]
+#[cfg(not(kernel_has_rtio_dma))]
#[unwind(allowed)]
extern fn dma_playback(_timestamp: i64, _ptr: i32, _uses_ddma: bool) {
    unimplemented!("not(kernel_has_rtio_dma)")
}

-// for satellite (has_rtio_dma but not in kernel)
-#[cfg(all(not(kernel_has_rtio_dma), has_rtio_dma))]
#[unwind(allowed)]
-extern fn dma_playback(timestamp: i64, ptr: i32, _uses_ddma: bool) {
+extern fn subkernel_load_run(id: u32, run: bool) {
-    // DDMA is always used on satellites, so the `uses_ddma` setting is ignored
+    send(&SubkernelLoadRunRequest { id: id, run: run });
-    // StartRemoteRequest reused as "normal" start request
-    send(&DmaStartRemoteRequest { id: ptr as i32, timestamp: timestamp });
-    // skip awaitremoterequest - it's a given
-    recv!(&DmaAwaitRemoteReply { timeout, error, channel, timestamp } => {
-        if timeout {
-            raise!("DMAError",
-                "Error running DMA on satellite device, timed out waiting for results");
-        }
-        if error & 1 != 0 {
-            raise!("RTIOUnderflow",
-                "RTIO underflow at channel {rtio_channel_info:0}, {1} mu",
-                channel as i64, timestamp as i64, 0);
-        }
-        if error & 2 != 0 {
-            raise!("RTIODestinationUnreachable",
-                "RTIO destination unreachable, output, at channel {rtio_channel_info:0}, {1} mu",
-                channel as i64, timestamp as i64, 0);
-        }
-    });
-}
-
-#[unwind(allowed)]
-extern fn subkernel_load_run(id: u32, destination: u8, run: bool) {
-    send(&SubkernelLoadRunRequest { id: id, destination: destination, run: run });
    recv!(&SubkernelLoadRunReply { succeeded } => {
        if !succeeded {
            raise!("SubkernelError",

@@ -498,7 +471,7 @@ extern fn subkernel_load_run(id: u32, destination: u8, run: bool) {
}

#[unwind(allowed)]
-extern fn subkernel_await_finish(id: u32, timeout: i64) {
+extern fn subkernel_await_finish(id: u32, timeout: u64) {
    send(&SubkernelAwaitFinishRequest { id: id, timeout: timeout });
    recv!(SubkernelAwaitFinishReply { status } => {
        match status {

@@ -516,11 +489,9 @@ extern fn subkernel_await_finish(id: u32, timeout: i64) {
}

#[unwind(aborts)]
-extern fn subkernel_send_message(id: u32, is_return: bool, destination: u8,
+extern fn subkernel_send_message(id: u32, count: u8, tag: &CSlice<u8>, data: *const *const ()) {
-                                 count: u8, tag: &CSlice<u8>, data: *const *const ()) {
    send(&SubkernelMsgSend {
        id: id,
-        destination: if is_return { None } else { Some(destination) },
        count: count,
        tag: tag.as_ref(),
        data: data

@@ -528,7 +499,7 @@ extern fn subkernel_send_message(id: u32, is_return: bool, destination: u8,
}

#[unwind(allowed)]
-extern fn subkernel_await_message(id: i32, timeout: i64, tags: &CSlice<u8>, min: u8, max: u8) -> u8 {
+extern fn subkernel_await_message(id: u32, timeout: u64, tags: &CSlice<u8>, min: u8, max: u8) -> u8 {
    send(&SubkernelMsgRecvRequest { id: id, timeout: timeout, tags: tags.as_ref() });
    recv!(SubkernelMsgRecvReply { status, count } => {
        match status {

@@ -18,7 +18,7 @@ impl<T> From<IoError<T>> for Error<T> {
// used by satellite -> master analyzer, subkernel exceptions
pub const SAT_PAYLOAD_MAX_SIZE: usize = /*max size*/512 - /*CRC*/4 - /*packet ID*/1 - /*last*/1 - /*length*/2;
// used by DDMA, subkernel program data (need to provide extra ID and destination)
-pub const MASTER_PAYLOAD_MAX_SIZE: usize = SAT_PAYLOAD_MAX_SIZE - /*source*/1 - /*destination*/1 - /*ID*/4;
+pub const MASTER_PAYLOAD_MAX_SIZE: usize = SAT_PAYLOAD_MAX_SIZE - /*destination*/1 - /*ID*/4;

#[derive(PartialEq, Clone, Copy, Debug)]
#[repr(u8)]

@@ -77,8 +77,6 @@ pub enum Packet {

    RoutingSetPath { destination: u8, hops: [u8; 32] },
    RoutingSetRank { rank: u8 },
-    RoutingRetrievePackets,
-    RoutingNoPackets,
    RoutingAck,

    MonitorRequest { destination: u8, channel: u16, probe: u8 },

@@ -108,26 +106,22 @@ pub enum Packet {
    AnalyzerDataRequest { destination: u8 },
    AnalyzerData { last: bool, length: u16, data: [u8; SAT_PAYLOAD_MAX_SIZE]},

-    DmaAddTraceRequest {
-        source: u8, destination: u8,
-        id: u32, status: PayloadStatus,
-        length: u16, trace: [u8; MASTER_PAYLOAD_MAX_SIZE]
-    },
-    DmaAddTraceReply { source: u8, destination: u8, id: u32, succeeded: bool },
-    DmaRemoveTraceRequest { source: u8, destination: u8, id: u32 },
-    DmaRemoveTraceReply { destination: u8, succeeded: bool },
-    DmaPlaybackRequest { source: u8, destination: u8, id: u32, timestamp: u64 },
-    DmaPlaybackReply { destination: u8, succeeded: bool },
-    DmaPlaybackStatus { source: u8, destination: u8, id: u32, error: u8, channel: u32, timestamp: u64 },
+    DmaAddTraceRequest { destination: u8, id: u32, status: PayloadStatus, length: u16, trace: [u8; MASTER_PAYLOAD_MAX_SIZE] },
+    DmaAddTraceReply { succeeded: bool },
+    DmaRemoveTraceRequest { destination: u8, id: u32 },
+    DmaRemoveTraceReply { succeeded: bool },
+    DmaPlaybackRequest { destination: u8, id: u32, timestamp: u64 },
+    DmaPlaybackReply { succeeded: bool },
+    DmaPlaybackStatus { destination: u8, id: u32, error: u8, channel: u32, timestamp: u64 },

    SubkernelAddDataRequest { destination: u8, id: u32, status: PayloadStatus, length: u16, data: [u8; MASTER_PAYLOAD_MAX_SIZE] },
    SubkernelAddDataReply { succeeded: bool },
-    SubkernelLoadRunRequest { source: u8, destination: u8, id: u32, run: bool },
+    SubkernelLoadRunRequest { destination: u8, id: u32, run: bool },
-    SubkernelLoadRunReply { destination: u8, succeeded: bool },
+    SubkernelLoadRunReply { succeeded: bool },
-    SubkernelFinished { destination: u8, id: u32, with_exception: bool, exception_src: u8 },
+    SubkernelFinished { id: u32, with_exception: bool },
    SubkernelExceptionRequest { destination: u8 },
    SubkernelException { last: bool, length: u16, data: [u8; SAT_PAYLOAD_MAX_SIZE] },
-    SubkernelMessage { source: u8, destination: u8, id: u32, status: PayloadStatus, length: u16, data: [u8; MASTER_PAYLOAD_MAX_SIZE] },
+    SubkernelMessage { destination: u8, id: u32, status: PayloadStatus, length: u16, data: [u8; MASTER_PAYLOAD_MAX_SIZE] },
    SubkernelMessageAck { destination: u8 },
}

@@ -170,8 +164,6 @@ impl Packet {
                rank: reader.read_u8()?
            },
            0x32 => Packet::RoutingAck,
-            0x33 => Packet::RoutingRetrievePackets,
-            0x34 => Packet::RoutingNoPackets,

            0x40 => Packet::MonitorRequest {
                destination: reader.read_u8()?,

@@ -285,8 +277,7 @@ impl Packet {
                }
            },

            0xb0 => {
-                let source = reader.read_u8()?;
                let destination = reader.read_u8()?;
                let id = reader.read_u32()?;
                let status = reader.read_u8()?;

@@ -294,7 +285,6 @@ impl Packet {
                let mut trace: [u8; MASTER_PAYLOAD_MAX_SIZE] = [0; MASTER_PAYLOAD_MAX_SIZE];
                reader.read_exact(&mut trace[0..length as usize])?;
                Packet::DmaAddTraceRequest {
-                    source: source,
                    destination: destination,
                    id: id,
                    status: PayloadStatus::from(status),

@@ -303,32 +293,24 @@ impl Packet {
                }
            },
            0xb1 => Packet::DmaAddTraceReply {
-                source: reader.read_u8()?,
-                destination: reader.read_u8()?,
-                id: reader.read_u32()?,
                succeeded: reader.read_bool()?
            },
            0xb2 => Packet::DmaRemoveTraceRequest {
-                source: reader.read_u8()?,
                destination: reader.read_u8()?,
                id: reader.read_u32()?
            },
            0xb3 => Packet::DmaRemoveTraceReply {
-                destination: reader.read_u8()?,
                succeeded: reader.read_bool()?
            },
            0xb4 => Packet::DmaPlaybackRequest {
-                source: reader.read_u8()?,
                destination: reader.read_u8()?,
                id: reader.read_u32()?,
                timestamp: reader.read_u64()?
            },
            0xb5 => Packet::DmaPlaybackReply {
-                destination: reader.read_u8()?,
                succeeded: reader.read_bool()?
            },
            0xb6 => Packet::DmaPlaybackStatus {
-                source: reader.read_u8()?,
                destination: reader.read_u8()?,
                id: reader.read_u32()?,
                error: reader.read_u8()?,

@@ -355,20 +337,16 @@ impl Packet {
                succeeded: reader.read_bool()?
            },
            0xc4 => Packet::SubkernelLoadRunRequest {
-                source: reader.read_u8()?,
                destination: reader.read_u8()?,
                id: reader.read_u32()?,
                run: reader.read_bool()?
            },
            0xc5 => Packet::SubkernelLoadRunReply {
-                destination: reader.read_u8()?,
                succeeded: reader.read_bool()?
            },
            0xc8 => Packet::SubkernelFinished {
-                destination: reader.read_u8()?,
                id: reader.read_u32()?,
                with_exception: reader.read_bool()?,
-                exception_src: reader.read_u8()?
            },
            0xc9 => Packet::SubkernelExceptionRequest {
                destination: reader.read_u8()?

@@ -385,7 +363,6 @@ impl Packet {
                }
            },
            0xcb => {
-                let source = reader.read_u8()?;
                let destination = reader.read_u8()?;
                let id = reader.read_u32()?;
                let status = reader.read_u8()?;

@@ -393,7 +370,6 @@ impl Packet {
                let mut data: [u8; MASTER_PAYLOAD_MAX_SIZE] = [0; MASTER_PAYLOAD_MAX_SIZE];
                reader.read_exact(&mut data[0..length as usize])?;
                Packet::SubkernelMessage {
-                    source: source,
                    destination: destination,
                    id: id,
                    status: PayloadStatus::from(status),

@@ -456,10 +432,6 @@ impl Packet {
            },
            Packet::RoutingAck =>
                writer.write_u8(0x32)?,
-            Packet::RoutingRetrievePackets =>
-                writer.write_u8(0x33)?,
-            Packet::RoutingNoPackets =>
-                writer.write_u8(0x34)?,

            Packet::MonitorRequest { destination, channel, probe } => {
                writer.write_u8(0x40)?;

@@ -589,9 +561,8 @@ impl Packet {
                writer.write_all(&data[0..length as usize])?;
            },

-            Packet::DmaAddTraceRequest { source, destination, id, status, trace, length } => {
+            Packet::DmaAddTraceRequest { destination, id, status, trace, length } => {
                writer.write_u8(0xb0)?;
-                writer.write_u8(source)?;
                writer.write_u8(destination)?;
                writer.write_u32(id)?;
                writer.write_u8(status as u8)?;

@@ -600,39 +571,31 @@ impl Packet {
                writer.write_u16(length)?;
                writer.write_all(&trace[0..length as usize])?;
            },
-            Packet::DmaAddTraceReply { source, destination, id, succeeded } => {
+            Packet::DmaAddTraceReply { succeeded } => {
                writer.write_u8(0xb1)?;
-                writer.write_u8(source)?;
-                writer.write_u8(destination)?;
-                writer.write_u32(id)?;
                writer.write_bool(succeeded)?;
            },
-            Packet::DmaRemoveTraceRequest { source, destination, id } => {
+            Packet::DmaRemoveTraceRequest { destination, id } => {
                writer.write_u8(0xb2)?;
-                writer.write_u8(source)?;
                writer.write_u8(destination)?;
                writer.write_u32(id)?;
            },
-            Packet::DmaRemoveTraceReply { destination, succeeded } => {
+            Packet::DmaRemoveTraceReply { succeeded } => {
                writer.write_u8(0xb3)?;
-                writer.write_u8(destination)?;
                writer.write_bool(succeeded)?;
            },
-            Packet::DmaPlaybackRequest { source, destination, id, timestamp } => {
+            Packet::DmaPlaybackRequest { destination, id, timestamp } => {
                writer.write_u8(0xb4)?;
-                writer.write_u8(source)?;
                writer.write_u8(destination)?;
                writer.write_u32(id)?;
                writer.write_u64(timestamp)?;
            },
-            Packet::DmaPlaybackReply { destination, succeeded } => {
+            Packet::DmaPlaybackReply { succeeded } => {
                writer.write_u8(0xb5)?;
-                writer.write_u8(destination)?;
                writer.write_bool(succeeded)?;
            },
-            Packet::DmaPlaybackStatus { source, destination, id, error, channel, timestamp } => {
+            Packet::DmaPlaybackStatus { destination, id, error, channel, timestamp } => {
                writer.write_u8(0xb6)?;
-                writer.write_u8(source)?;
                writer.write_u8(destination)?;
                writer.write_u32(id)?;
                writer.write_u8(error)?;

@@ -652,24 +615,20 @@ impl Packet {
                writer.write_u8(0xc1)?;
                writer.write_bool(succeeded)?;
            },
-            Packet::SubkernelLoadRunRequest { source, destination, id, run } => {
+            Packet::SubkernelLoadRunRequest { destination, id, run } => {
                writer.write_u8(0xc4)?;
-                writer.write_u8(source)?;
                writer.write_u8(destination)?;
                writer.write_u32(id)?;
                writer.write_bool(run)?;
            },
-            Packet::SubkernelLoadRunReply { destination, succeeded } => {
+            Packet::SubkernelLoadRunReply { succeeded } => {
                writer.write_u8(0xc5)?;
-                writer.write_u8(destination)?;
                writer.write_bool(succeeded)?;
            },
-            Packet::SubkernelFinished { destination, id, with_exception, exception_src } => {
+            Packet::SubkernelFinished { id, with_exception } => {
                writer.write_u8(0xc8)?;
-                writer.write_u8(destination)?;
                writer.write_u32(id)?;
                writer.write_bool(with_exception)?;
-                writer.write_u8(exception_src)?;
            },
            Packet::SubkernelExceptionRequest { destination } => {
                writer.write_u8(0xc9)?;

@@ -681,9 +640,8 @@ impl Packet {
                writer.write_u16(length)?;
                writer.write_all(&data[0..length as usize])?;
            },
-            Packet::SubkernelMessage { source, destination, id, status, data, length } => {
+            Packet::SubkernelMessage { destination, id, status, data, length } => {
                writer.write_u8(0xcb)?;
-                writer.write_u8(source)?;
                writer.write_u8(destination)?;
                writer.write_u32(id)?;
                writer.write_u8(status as u8)?;

@@ -697,36 +655,4 @@ impl Packet {
        }
        Ok(())
    }

-    pub fn routable_destination(&self) -> Option<u8> {
-        // only for packets that could be re-routed, not only forwarded
-        match self {
-            Packet::DmaAddTraceRequest { destination, .. } => Some(*destination),
-            Packet::DmaAddTraceReply { destination, .. } => Some(*destination),
-            Packet::DmaRemoveTraceRequest { destination, .. } => Some(*destination),
-            Packet::DmaRemoveTraceReply { destination, .. } => Some(*destination),
-            Packet::DmaPlaybackRequest { destination, .. } => Some(*destination),
-            Packet::DmaPlaybackReply { destination, .. } => Some(*destination),
-            Packet::SubkernelLoadRunRequest { destination, .. } => Some(*destination),
-            Packet::SubkernelLoadRunReply { destination, .. } => Some(*destination),
-            Packet::SubkernelMessage { destination, .. } => Some(*destination),
-            Packet::SubkernelMessageAck { destination, .. } => Some(*destination),
-            Packet::DmaPlaybackStatus { destination, .. } => Some(*destination),
-            Packet::SubkernelFinished { destination, .. } => Some(*destination),
-            _ => None
-        }
-    }
-
-    pub fn expects_response(&self) -> bool {
-        // returns true if the routable packet should elicit a response
-        // e.g. reply, ACK packets end a conversation,
-        // and firmware should not wait for response
-        match self {
-            Packet::DmaAddTraceReply { .. } | Packet::DmaRemoveTraceReply { .. } |
-            Packet::DmaPlaybackReply { .. } | Packet::SubkernelLoadRunReply { .. } |
-            Packet::SubkernelMessageAck { .. } | Packet::DmaPlaybackStatus { .. } |
-            Packet::SubkernelFinished { .. } => false,
-            _ => true
-        }
-    }
}

@@ -103,12 +103,12 @@ pub enum Message<'a> {
    SpiReadReply { succeeded: bool, data: u32 },
    SpiBasicReply { succeeded: bool },

-    SubkernelLoadRunRequest { id: u32, destination: u8, run: bool },
+    SubkernelLoadRunRequest { id: u32, run: bool },
    SubkernelLoadRunReply { succeeded: bool },
-    SubkernelAwaitFinishRequest { id: u32, timeout: i64 },
+    SubkernelAwaitFinishRequest { id: u32, timeout: u64 },
    SubkernelAwaitFinishReply { status: SubkernelStatus },
-    SubkernelMsgSend { id: u32, destination: Option<u8>, count: u8, tag: &'a [u8], data: *const *const () },
+    SubkernelMsgSend { id: u32, count: u8, tag: &'a [u8], data: *const *const () },
-    SubkernelMsgRecvRequest { id: i32, timeout: i64, tags: &'a [u8] },
+    SubkernelMsgRecvRequest { id: u32, timeout: u64, tags: &'a [u8] },
    SubkernelMsgRecvReply { status: SubkernelStatus, count: u8 },

    Log(fmt::Arguments<'a>),

@ -103,7 +103,7 @@ pub mod subkernel {
|
||||||
pub enum FinishStatus {
|
pub enum FinishStatus {
|
||||||
Ok,
|
Ok,
|
||||||
CommLost,
|
CommLost,
|
||||||
Exception(u8) // exception source
|
Exception
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, PartialEq, Clone, Copy)]
|
#[derive(Debug, PartialEq, Clone, Copy)]
|
||||||
|
@ -216,7 +216,7 @@ pub mod subkernel {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn subkernel_finished(io: &Io, subkernel_mutex: &Mutex, id: u32, with_exception: bool, exception_src: u8) {
|
pub fn subkernel_finished(io: &Io, subkernel_mutex: &Mutex, id: u32, with_exception: bool) {
|
||||||
// called upon receiving DRTIO SubkernelRunDone
|
// called upon receiving DRTIO SubkernelRunDone
|
||||||
let _lock = subkernel_mutex.lock(io).unwrap();
|
let _lock = subkernel_mutex.lock(io).unwrap();
|
||||||
let subkernel = unsafe { SUBKERNELS.get_mut(&id) };
|
let subkernel = unsafe { SUBKERNELS.get_mut(&id) };
|
||||||
|
@ -226,7 +226,7 @@ pub mod subkernel {
|
||||||
if subkernel.state == SubkernelState::Running {
|
if subkernel.state == SubkernelState::Running {
|
||||||
subkernel.state = SubkernelState::Finished {
|
subkernel.state = SubkernelState::Finished {
|
||||||
status: match with_exception {
|
status: match with_exception {
|
||||||
true => FinishStatus::Exception(exception_src),
|
true => FinishStatus::Exception,
|
||||||
false => FinishStatus::Ok,
|
false => FinishStatus::Ok,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -266,9 +266,9 @@ pub mod subkernel {
|
||||||
Ok(SubkernelFinished {
|
Ok(SubkernelFinished {
|
||||||
id: id,
|
id: id,
|
||||||
comm_lost: status == FinishStatus::CommLost,
|
comm_lost: status == FinishStatus::CommLost,
|
||||||
exception: if let FinishStatus::Exception(dest) = status {
|
exception: if status == FinishStatus::Exception {
|
||||||
                Some(drtio::subkernel_retrieve_exception(io, aux_mutex,
-                    routing_table, dest)?)
+                    routing_table, subkernel.destination)?)
            } else { None }
        })
    },
@@ -279,7 +279,7 @@ pub mod subkernel {
    }

    pub fn await_finish(io: &Io, aux_mutex: &Mutex, subkernel_mutex: &Mutex,
-        routing_table: &RoutingTable, id: u32, timeout: i64) -> Result<SubkernelFinished, Error> {
+        routing_table: &RoutingTable, id: u32, timeout: u64) -> Result<SubkernelFinished, Error> {
        {
            let _lock = subkernel_mutex.lock(io)?;
            match unsafe { SUBKERNELS.get(&id).unwrap().state } {
@@ -291,7 +291,7 @@ pub mod subkernel {
        }
        let max_time = clock::get_ms() + timeout as u64;
        let _res = io.until(|| {
-            if timeout > 0 && clock::get_ms() > max_time {
+            if clock::get_ms() > max_time {
                return true;
            }
            if subkernel_mutex.test_lock() {
@@ -305,7 +305,7 @@ pub mod subkernel {
                _ => false
            }
        })?;
-        if timeout > 0 && clock::get_ms() > max_time {
+        if clock::get_ms() > max_time {
            error!("Remote subkernel finish await timed out");
            return Err(Error::Timeout);
        }
@@ -332,9 +332,8 @@ pub mod subkernel {
            Err(_) => return,
        };
        let subkernel = unsafe { SUBKERNELS.get(&id) };
-        if subkernel.is_some() && subkernel.unwrap().state != SubkernelState::Running {
-            warn!("received a message for a non-running subkernel #{}", id);
-            // do not add messages for non-running or deleted subkernels
+        if subkernel.is_none() || subkernel.unwrap().state != SubkernelState::Running {
+            // do not add messages for non-existing, non-running or deleted subkernels
            return
        }
        if status.is_first() {
@@ -360,26 +359,19 @@ pub mod subkernel {
        }
    }

-    pub fn message_await(io: &Io, subkernel_mutex: &Mutex, id: u32, timeout: i64
+    pub fn message_await(io: &Io, subkernel_mutex: &Mutex, id: u32, timeout: u64
    ) -> Result<Message, Error> {
-        let is_subkernel = {
+        {
            let _lock = subkernel_mutex.lock(io)?;
-            let is_subkernel = unsafe { SUBKERNELS.get(&id).is_some() };
-            if is_subkernel {
            match unsafe { SUBKERNELS.get(&id).unwrap().state } {
-                SubkernelState::Finished { status: FinishStatus::Ok } |
+                SubkernelState::Finished { .. } => return Err(Error::SubkernelFinished),
                SubkernelState::Running => (),
-                SubkernelState::Finished {
-                    status: FinishStatus::CommLost,
-                } => return Err(Error::SubkernelFinished),
                _ => return Err(Error::IncorrectState)
            }
        }
-            is_subkernel
-        };
        let max_time = clock::get_ms() + timeout as u64;
        let message = io.until_ok(|| {
-            if timeout > 0 && clock::get_ms() > max_time {
+            if clock::get_ms() > max_time {
                return Ok(None);
            }
            if subkernel_mutex.test_lock() {
@@ -392,12 +384,9 @@ pub mod subkernel {
                    return Ok(Some(unsafe { MESSAGE_QUEUE.remove(i) }));
                }
            }
-            if is_subkernel {
            match unsafe { SUBKERNELS.get(&id).unwrap().state } {
-                SubkernelState::Finished { status: FinishStatus::CommLost } |
-                SubkernelState::Finished { status: FinishStatus::Exception(_) } => return Ok(None),
+                SubkernelState::Finished { .. } => return Ok(None),
                _ => ()
            }
-            }
            }
            Err(())
        });
@@ -419,17 +408,15 @@ pub mod subkernel {
    }

    pub fn message_send<'a>(io: &Io, aux_mutex: &Mutex, subkernel_mutex: &Mutex,
-        routing_table: &RoutingTable, id: u32, destination: Option<u8>, count: u8, tag: &'a [u8], message: *const *const ()
+        routing_table: &RoutingTable, id: u32, count: u8, tag: &'a [u8], message: *const *const ()
    ) -> Result<(), Error> {
        let mut writer = Cursor::new(Vec::new());
+        let _lock = subkernel_mutex.lock(io)?;
+        let destination = unsafe { SUBKERNELS.get(&id).unwrap().destination };

        // reuse rpc code for sending arbitrary data
        rpc::send_args(&mut writer, 0, tag, message, false)?;
        // skip service tag, but overwrite first byte with tag count
-        let destination = destination.unwrap_or_else(|| {
-            let _lock = subkernel_mutex.lock(io).unwrap();
-            unsafe { SUBKERNELS.get(&id).unwrap().destination }
-        }
-        );
        let data = &mut writer.into_inner()[3..];
        data[0] = count;
        Ok(drtio::subkernel_send_message(
@@ -167,10 +167,10 @@ pub mod remote_dma {
    }

    pub fn playback_done(io: &Io, ddma_mutex: &Mutex,
-        id: u32, source: u8, error: u8, channel: u32, timestamp: u64) {
+        id: u32, destination: u8, error: u8, channel: u32, timestamp: u64) {
        // called upon receiving PlaybackDone aux packet
        let _lock = ddma_mutex.lock(io).unwrap();
-        let mut trace = unsafe { TRACES.get_mut(&id).unwrap().get_mut(&source).unwrap() };
+        let mut trace = unsafe { TRACES.get_mut(&id).unwrap().get_mut(&destination).unwrap() };
        trace.state = RemoteState::PlaybackEnded {
            error: error,
            channel: channel,
@@ -78,16 +78,6 @@ pub mod drtio {
        }
    }

-    fn link_has_async_ready(linkno: u8) -> bool {
-        let linkno = linkno as usize;
-        let async_ready;
-        unsafe {
-            async_ready = (csr::DRTIO[linkno].async_messages_ready_read)() == 1;
-            (csr::DRTIO[linkno].async_messages_ready_write)(1);
-        }
-        async_ready
-    }
-
    fn recv_aux_timeout(io: &Io, linkno: u8, timeout: u32) -> Result<drtioaux::Packet, Error> {
        let max_time = clock::get_ms() + timeout as u64;
        loop {
@@ -106,62 +96,27 @@ pub mod drtio {
        }
    }

-    fn process_async_packets(io: &Io, aux_mutex: &Mutex, ddma_mutex: &Mutex, subkernel_mutex: &Mutex,
-        routing_table: &drtio_routing::RoutingTable, linkno: u8)
-    {
-        if link_has_async_ready(linkno) {
-            loop {
-                let reply = aux_transact(io, aux_mutex, linkno, &drtioaux::Packet::RoutingRetrievePackets);
-                if let Ok(packet) = reply {
-                    match packet {
-                        // packets to be consumed locally
-                        drtioaux::Packet::DmaPlaybackStatus { id, source, destination: 0, error, channel, timestamp } => {
-                            remote_dma::playback_done(io, ddma_mutex, id, source, error, channel, timestamp);
+    fn process_async_packets(io: &Io, ddma_mutex: &Mutex, subkernel_mutex: &Mutex, linkno: u8,
+        packet: drtioaux::Packet) -> Option<drtioaux::Packet> {
+        // returns None if an async packet has been consumed
+        match packet {
+            drtioaux::Packet::DmaPlaybackStatus { id, destination, error, channel, timestamp } => {
+                remote_dma::playback_done(io, ddma_mutex, id, destination, error, channel, timestamp);
+                None
+            },
+            drtioaux::Packet::SubkernelFinished { id, with_exception } => {
+                subkernel::subkernel_finished(io, subkernel_mutex, id, with_exception);
+                None
            },
-                        drtioaux::Packet::SubkernelFinished { id, destination: 0, with_exception, exception_src } => {
-                            subkernel::subkernel_finished(io, subkernel_mutex, id, with_exception, exception_src);
-                        },
-                        drtioaux::Packet::SubkernelMessage { id, source: from, destination: 0, status, length, data } => {
-                            subkernel::message_handle_incoming(io, subkernel_mutex, id, status, length as usize, &data);
-                            // acknowledge receiving part of the message
-                            drtioaux::send(linkno,
-                                &drtioaux::Packet::SubkernelMessageAck { destination: from }
-                            ).unwrap();
-                            // give the satellite some time to process the message
-                            io.sleep(10).unwrap();
-                        },
-                        // routable packets
-                        drtioaux::Packet::DmaAddTraceRequest { destination, .. } |
-                        drtioaux::Packet::DmaAddTraceReply { destination, .. } |
-                        drtioaux::Packet::DmaRemoveTraceRequest { destination, .. } |
-                        drtioaux::Packet::DmaRemoveTraceReply { destination, .. } |
-                        drtioaux::Packet::DmaPlaybackRequest { destination, .. } |
-                        drtioaux::Packet::DmaPlaybackReply { destination, .. } |
-                        drtioaux::Packet::SubkernelLoadRunRequest { destination, .. } |
-                        drtioaux::Packet::SubkernelLoadRunReply { destination, .. } |
-                        drtioaux::Packet::SubkernelMessage { destination, .. } |
-                        drtioaux::Packet::SubkernelMessageAck { destination, .. } |
-                        drtioaux::Packet::DmaPlaybackStatus { destination, .. } |
-                        drtioaux::Packet::SubkernelFinished { destination, .. } => {
-                            let dest_link = routing_table.0[destination as usize][0] - 1;
-                            if dest_link == linkno {
-                                warn!("[LINK#{}] Re-routed packet would return to the same link, dropping: {:?}", linkno, packet);
-                            } else if destination == 0 {
-                                warn!("[LINK#{}] Received invalid routable packet: {:?}", linkno, packet)
-                            } else {
-                                drtioaux::send(dest_link, &packet).unwrap();
-                            }
-                        }
-
-                        drtioaux::Packet::RoutingNoPackets => break,
-
-                        other => warn!("[LINK#{}] Received an unroutable packet: {:?}", linkno, other)
-                    }
-                } else {
-                    warn!("[LINK#{}] Error handling async packets ({})", linkno, reply.unwrap_err());
-                    return;
-                }
-            }
+            drtioaux::Packet::SubkernelMessage { id, destination: from, status, length, data } => {
+                subkernel::message_handle_incoming(io, subkernel_mutex, id, status, length as usize, &data);
+                // acknowledge receiving part of the message
+                drtioaux::send(linkno,
+                    &drtioaux::Packet::SubkernelMessageAck { destination: from }
+                ).unwrap();
+                None
+            }
+            other => Some(other)
        }
    }

@@ -268,10 +223,14 @@ pub mod drtio {
        }
    }

-    fn process_unsolicited_aux(io: &Io, aux_mutex: &Mutex, linkno: u8) {
+    fn process_unsolicited_aux(io: &Io, aux_mutex: &Mutex, ddma_mutex: &Mutex, subkernel_mutex: &Mutex, linkno: u8) {
        let _lock = aux_mutex.lock(io).unwrap();
        match drtioaux::recv(linkno) {
-            Ok(Some(packet)) => warn!("[LINK#{}] unsolicited aux packet: {:?}", linkno, packet),
+            Ok(Some(packet)) => {
+                if let Some(packet) = process_async_packets(io, ddma_mutex, subkernel_mutex, linkno, packet) {
+                    warn!("[LINK#{}] unsolicited aux packet: {:?}", linkno, packet);
+                }
+            }
            Ok(None) => (),
            Err(_) => warn!("[LINK#{}] aux packet error", linkno)
        }
@@ -334,35 +293,44 @@ pub mod drtio {
        let linkno = hop - 1;
        if destination_up(up_destinations, destination) {
            if up_links[linkno as usize] {
-                let reply = aux_transact(io, aux_mutex, linkno,
-                    &drtioaux::Packet::DestinationStatusRequest {
-                        destination: destination
-                    });
-                if let Ok(reply) = reply {
-                    match reply {
-                        drtioaux::Packet::DestinationDownReply => {
-                            destination_set_up(routing_table, up_destinations, destination, false);
-                            remote_dma::destination_changed(io, aux_mutex, ddma_mutex, routing_table, destination, false);
-                            subkernel::destination_changed(io, aux_mutex, subkernel_mutex, routing_table, destination, false);
+                loop {
+                    let reply = aux_transact(io, aux_mutex, linkno,
+                        &drtioaux::Packet::DestinationStatusRequest {
+                            destination: destination
+                        });
+                    if let Ok(reply) = reply {
+                        let reply = process_async_packets(io, ddma_mutex, subkernel_mutex, linkno, reply);
+                        match reply {
+                            Some(drtioaux::Packet::DestinationDownReply) => {
+                                destination_set_up(routing_table, up_destinations, destination, false);
+                                remote_dma::destination_changed(io, aux_mutex, ddma_mutex, routing_table, destination, false);
+                                subkernel::destination_changed(io, aux_mutex, subkernel_mutex, routing_table, destination, false);
+                            }
+                            Some(drtioaux::Packet::DestinationOkReply) => (),
+                            Some(drtioaux::Packet::DestinationSequenceErrorReply { channel }) => {
+                                error!("[DEST#{}] RTIO sequence error involving channel 0x{:04x}:{}", destination, channel, resolve_channel_name(channel as u32));
+                                unsafe { SEEN_ASYNC_ERRORS |= ASYNC_ERROR_SEQUENCE_ERROR };
+                            }
+                            Some(drtioaux::Packet::DestinationCollisionReply { channel }) => {
+                                error!("[DEST#{}] RTIO collision involving channel 0x{:04x}:{}", destination, channel, resolve_channel_name(channel as u32));
+                                unsafe { SEEN_ASYNC_ERRORS |= ASYNC_ERROR_COLLISION };
+                            }
+                            Some(drtioaux::Packet::DestinationBusyReply { channel }) => {
+                                error!("[DEST#{}] RTIO busy error involving channel 0x{:04x}:{}", destination, channel, resolve_channel_name(channel as u32));
+                                unsafe { SEEN_ASYNC_ERRORS |= ASYNC_ERROR_BUSY };
+                            }
+                            Some(packet) => error!("[DEST#{}] received unexpected aux packet: {:?}", destination, packet),
+                            None => {
+                                // continue asking until we get Destination...Reply or error out
+                                // wait a bit not to overwhelm the receiver causing gateway errors
+                                io.sleep(10).unwrap();
+                                continue;
+                            }
                        }
-                        drtioaux::Packet::DestinationOkReply => (),
-                        drtioaux::Packet::DestinationSequenceErrorReply { channel } => {
-                            error!("[DEST#{}] RTIO sequence error involving channel 0x{:04x}:{}", destination, channel, resolve_channel_name(channel as u32));
-                            unsafe { SEEN_ASYNC_ERRORS |= ASYNC_ERROR_SEQUENCE_ERROR };
-                        }
-                        drtioaux::Packet::DestinationCollisionReply { channel } => {
-                            error!("[DEST#{}] RTIO collision involving channel 0x{:04x}:{}", destination, channel, resolve_channel_name(channel as u32));
-                            unsafe { SEEN_ASYNC_ERRORS |= ASYNC_ERROR_COLLISION };
-                        }
-                        drtioaux::Packet::DestinationBusyReply { channel } => {
-                            error!("[DEST#{}] RTIO busy error involving channel 0x{:04x}:{}", destination, channel, resolve_channel_name(channel as u32));
-                            unsafe { SEEN_ASYNC_ERRORS |= ASYNC_ERROR_BUSY };
-                        }
-                        packet => error!("[DEST#{}] received unexpected aux packet: {:?}", destination, packet),
-
+                    } else {
+                        error!("[DEST#{}] communication failed ({:?})", destination, reply.unwrap_err());
                    }
-                } else {
-                    error!("[DEST#{}] communication failed ({:?})", destination, reply.unwrap_err());
+                    break;
                }
            } else {
                destination_set_up(routing_table, up_destinations, destination, false);
@@ -403,8 +371,7 @@ pub mod drtio {
            if up_links[linkno as usize] {
                /* link was previously up */
                if link_rx_up(linkno) {
-                    process_async_packets(&io, aux_mutex, ddma_mutex, subkernel_mutex, routing_table, linkno);
-                    process_unsolicited_aux(&io, aux_mutex, linkno);
+                    process_unsolicited_aux(&io, aux_mutex, ddma_mutex, subkernel_mutex, linkno);
                    process_local_errors(linkno);
                } else {
                    info!("[LINK#{}] link is down", linkno);
@@ -489,10 +456,10 @@ pub mod drtio {
        partition_data(trace, |slice, status, len: usize| {
            let reply = aux_transact(io, aux_mutex, linkno,
                &drtioaux::Packet::DmaAddTraceRequest {
-                    id: id, source: 0, destination: destination, status: status, length: len as u16, trace: *slice})?;
+                    id: id, destination: destination, status: status, length: len as u16, trace: *slice})?;
            match reply {
-                drtioaux::Packet::DmaAddTraceReply { destination: 0, succeeded: true, .. } => Ok(()),
-                drtioaux::Packet::DmaAddTraceReply { destination: 0, succeeded: false, .. } => Err(Error::DmaAddTraceFail(destination)),
+                drtioaux::Packet::DmaAddTraceReply { succeeded: true } => Ok(()),
+                drtioaux::Packet::DmaAddTraceReply { succeeded: false } => Err(Error::DmaAddTraceFail(destination)),
                packet => Err(Error::UnexpectedPacket(packet)),
            }
        })
@@ -502,10 +469,10 @@ pub mod drtio {
        id: u32, destination: u8) -> Result<(), Error> {
        let linkno = routing_table.0[destination as usize][0] - 1;
        let reply = aux_transact(io, aux_mutex, linkno,
-            &drtioaux::Packet::DmaRemoveTraceRequest { id: id, source: 0, destination: destination })?;
+            &drtioaux::Packet::DmaRemoveTraceRequest { id: id, destination: destination })?;
        match reply {
-            drtioaux::Packet::DmaRemoveTraceReply { destination: 0, succeeded: true } => Ok(()),
-            drtioaux::Packet::DmaRemoveTraceReply { destination: 0, succeeded: false } => Err(Error::DmaEraseFail(destination)),
+            drtioaux::Packet::DmaRemoveTraceReply { succeeded: true } => Ok(()),
+            drtioaux::Packet::DmaRemoveTraceReply { succeeded: false } => Err(Error::DmaEraseFail(destination)),
            packet => Err(Error::UnexpectedPacket(packet)),
        }
    }
@@ -514,10 +481,10 @@ pub mod drtio {
        id: u32, destination: u8, timestamp: u64) -> Result<(), Error> {
        let linkno = routing_table.0[destination as usize][0] - 1;
        let reply = aux_transact(io, aux_mutex, linkno,
-            &drtioaux::Packet::DmaPlaybackRequest{ id: id, source: 0, destination: destination, timestamp: timestamp })?;
+            &drtioaux::Packet::DmaPlaybackRequest{ id: id, destination: destination, timestamp: timestamp })?;
        match reply {
-            drtioaux::Packet::DmaPlaybackReply { destination: 0, succeeded: true } => Ok(()),
-            drtioaux::Packet::DmaPlaybackReply { destination: 0, succeeded: false } =>
+            drtioaux::Packet::DmaPlaybackReply { succeeded: true } => Ok(()),
+            drtioaux::Packet::DmaPlaybackReply { succeeded: false } =>
                Err(Error::DmaPlaybackFail(destination)),
            packet => Err(Error::UnexpectedPacket(packet)),
        }
@@ -592,10 +559,10 @@ pub mod drtio {
        id: u32, destination: u8, run: bool) -> Result<(), Error> {
        let linkno = routing_table.0[destination as usize][0] - 1;
        let reply = aux_transact(io, aux_mutex, linkno,
-            &drtioaux::Packet::SubkernelLoadRunRequest{ id: id, source: 0, destination: destination, run: run })?;
+            &drtioaux::Packet::SubkernelLoadRunRequest{ id: id, destination: destination, run: run })?;
        match reply {
-            drtioaux::Packet::SubkernelLoadRunReply { destination: 0, succeeded: true } => Ok(()),
-            drtioaux::Packet::SubkernelLoadRunReply { destination: 0, succeeded: false } =>
+            drtioaux::Packet::SubkernelLoadRunReply { succeeded: true } => Ok(()),
+            drtioaux::Packet::SubkernelLoadRunReply { succeeded: false } =>
                Err(Error::SubkernelRunFail(destination)),
            packet => Err(Error::UnexpectedPacket(packet)),
        }
@@ -628,8 +595,7 @@ pub mod drtio {
        partition_data(message, |slice, status, len: usize| {
            let reply = aux_transact(io, aux_mutex, linkno,
                &drtioaux::Packet::SubkernelMessage {
-                    source: 0, destination: destination,
-                    id: id, status: status, length: len as u16, data: *slice})?;
+                    destination: destination, id: id, status: status, length: len as u16, data: *slice})?;
            match reply {
                drtioaux::Packet::SubkernelMessageAck { .. } => Ok(()),
                packet => Err(Error::UnexpectedPacket(packet)),
@@ -471,7 +471,6 @@ fn process_host_message(io: &Io, _aux_mutex: &Mutex, _ddma_mutex: &Mutex, _subke
            match subkernel::upload(io, _aux_mutex, _subkernel_mutex, _routing_table, _id) {
                Ok(_) => host_write(stream, host::Reply::LoadCompleted)?,
                Err(error) => {
-                    subkernel::clear_subkernels(io, _subkernel_mutex)?;
                    let mut description = String::new();
                    write!(&mut description, "{}", error).unwrap();
                    host_write(stream, host::Reply::LoadFailed(&description))?
@@ -632,8 +631,6 @@ fn process_kern_message(io: &Io, aux_mutex: &Mutex,
            unsafe { kernel::stop() }
            session.kernel_state = KernelState::Absent;
            unsafe { session.congress.cache.unborrow() }
-            #[cfg(has_drtio)]
-            subkernel::clear_subkernels(io, _subkernel_mutex)?;

            match stream {
                None => return Ok(true),
@@ -651,8 +648,6 @@ fn process_kern_message(io: &Io, aux_mutex: &Mutex,
            unsafe { kernel::stop() }
            session.kernel_state = KernelState::Absent;
            unsafe { session.congress.cache.unborrow() }
-            #[cfg(has_drtio)]
-            subkernel::clear_subkernels(io, _subkernel_mutex)?;

            match stream {
                None => {
@@ -673,7 +668,7 @@ fn process_kern_message(io: &Io, aux_mutex: &Mutex,
            }
        }
        #[cfg(has_drtio)]
-        &kern::SubkernelLoadRunRequest { id, destination: _, run } => {
+        &kern::SubkernelLoadRunRequest { id, run } => {
            let succeeded = match subkernel::load(
                io, aux_mutex, _subkernel_mutex, routing_table, id, run) {
                Ok(()) => true,
@@ -704,20 +699,20 @@ fn process_kern_message(io: &Io, aux_mutex: &Mutex,
            kern_send(io, &kern::SubkernelAwaitFinishReply { status: status })
        }
        #[cfg(has_drtio)]
-        &kern::SubkernelMsgSend { id, destination, count, tag, data } => {
-            subkernel::message_send(io, aux_mutex, _subkernel_mutex, routing_table, id, destination, count, tag, data)?;
+        &kern::SubkernelMsgSend { id, count, tag, data } => {
+            subkernel::message_send(io, aux_mutex, _subkernel_mutex, routing_table, id, count, tag, data)?;
            kern_acknowledge()
        }
        #[cfg(has_drtio)]
        &kern::SubkernelMsgRecvRequest { id, timeout, tags } => {
-            let message_received = subkernel::message_await(io, _subkernel_mutex, id as u32, timeout);
+            let message_received = subkernel::message_await(io, _subkernel_mutex, id, timeout);
            let (status, count) = match message_received {
                Ok(ref message) => (kern::SubkernelStatus::NoError, message.count),
                Err(SubkernelError::Timeout) => (kern::SubkernelStatus::Timeout, 0),
                Err(SubkernelError::IncorrectState) => (kern::SubkernelStatus::IncorrectState, 0),
                Err(SubkernelError::SubkernelFinished) => {
                    let res = subkernel::retrieve_finish_status(io, aux_mutex, _subkernel_mutex,
-                        routing_table, id as u32)?;
+                        routing_table, id)?;
                    if res.comm_lost {
                        (kern::SubkernelStatus::CommLost, 0)
                    } else if let Some(exception) = &res.exception {
@@ -976,13 +971,7 @@ pub fn thread(io: Io, aux_mutex: &Mutex,
                drtio::clear_buffers(&io, &aux_mutex);
            }
        }
-        loop {
-            match stream.close() {
-                Ok(_) => break,
-                Err(SchedError::Interrupted) => (),
-                Err(e) => panic!("session: close socket: {:?}", e)
-            };
-        }
+        stream.close().expect("session: close socket");
    });
}

@@ -1,190 +1,41 @@
-use alloc::{vec::Vec, collections::btree_map::BTreeMap, string::String};
-use core::mem;
-use board_artiq::{drtioaux, drtio_routing::RoutingTable};
 use board_misoc::{csr, cache::flush_l2_cache};
 use proto_artiq::drtioaux_proto::PayloadStatus;
-use routing::{Router, Sliceable};
-use kernel::Manager as KernelManager;
-use ::{cricon_select, cricon_read, RtioMaster, MASTER_PAYLOAD_MAX_SIZE};
+use alloc::{vec::Vec, collections::btree_map::BTreeMap};
+use ::{cricon_select, RtioMaster};

 const ALIGNMENT: usize = 64;

-#[derive(PartialEq)]
+#[derive(Debug, PartialEq)]
 enum ManagerState {
     Idle,
     Playback
 }

 pub struct RtioStatus {
-    pub source: u8,
     pub id: u32,
     pub error: u8,
     pub channel: u32,
     pub timestamp: u64
 }

-#[derive(Debug)]
 pub enum Error {
     IdNotFound,
     PlaybackInProgress,
-    EntryNotComplete,
-    MasterDmaFound,
-    UploadFail,
+    EntryNotComplete
 }

+#[derive(Debug)]
 struct Entry {
     trace: Vec<u8>,
     padding_len: usize,
-    complete: bool,
-    duration: u64, // relevant for locally ran DMA
-}
-
-impl Entry {
-    pub fn from_vec(data: Vec<u8>, duration: u64) -> Entry {
-        let mut entry = Entry {
-            trace: data,
-            padding_len: 0,
-            complete: true,
-            duration: duration,
-        };
-        entry.realign();
-        entry
-    }
-
-    pub fn id(&self) -> u32 {
-        self.trace[self.padding_len..].as_ptr() as u32
-    }
-
-    pub fn realign(&mut self) {
-        self.trace.push(0);
-        let data_len = self.trace.len();
-
-        self.trace.reserve(ALIGNMENT - 1);
-        let padding = ALIGNMENT - self.trace.as_ptr() as usize % ALIGNMENT;
-        let padding = if padding == ALIGNMENT { 0 } else { padding };
-        for _ in 0..padding {
-            // Vec guarantees that this will not reallocate
-            self.trace.push(0)
-        }
-        for i in 1..data_len + 1 {
-            self.trace[data_len + padding - i] = self.trace[data_len - i]
-        }
-        self.complete = true;
-        self.padding_len = padding;
-    }
-}
-
-enum RemoteTraceState {
-    Unsent,
-    Sending(usize),
-    Ready,
-    Running(usize),
-}
-
-struct RemoteTraces {
-    remote_traces: BTreeMap<u8, Sliceable>,
-    state: RemoteTraceState,
-}
-
-impl RemoteTraces {
-    pub fn new(traces: BTreeMap<u8, Sliceable>) -> RemoteTraces {
-        RemoteTraces {
-            remote_traces: traces,
-            state: RemoteTraceState::Unsent
-        }
-    }
-
-    // on subkernel request
-    pub fn upload_traces(&mut self, id: u32, router: &mut Router, rank: u8, self_destination: u8, routing_table: &RoutingTable) -> usize {
-        let len = self.remote_traces.len();
-        if len > 0 {
-            self.state = RemoteTraceState::Sending(self.remote_traces.len());
-            for (dest, trace) in self.remote_traces.iter_mut() {
-                // queue up the first packet for all destinations, rest will be sent after first ACK
-                let mut data_slice: [u8; MASTER_PAYLOAD_MAX_SIZE] = [0; MASTER_PAYLOAD_MAX_SIZE];
-                let meta = trace.get_slice_master(&mut data_slice);
-                router.route(drtioaux::Packet::DmaAddTraceRequest {
-                    source: self_destination, destination: *dest, id: id,
-                    status: meta.status, length: meta.len, trace: data_slice
-                }, routing_table, rank, self_destination);
-            }
-        }
-        len
-    }
-
-    // on incoming Packet::DmaAddTraceReply
-    pub fn ack_upload(&mut self, kernel_manager: &mut KernelManager, source: u8, id: u32, succeeded: bool, router: &mut Router, rank: u8, self_destination: u8, routing_table: &RoutingTable) {
-        if let RemoteTraceState::Sending(count) = self.state {
-            if let Some(trace) = self.remote_traces.get_mut(&source) {
-                if trace.at_end() {
-                    if count - 1 == 0 {
-                        self.state = RemoteTraceState::Ready;
-                        kernel_manager.ddma_remote_uploaded(succeeded);
-                    } else {
-                        self.state = RemoteTraceState::Sending(count - 1);
-                    }
-                } else {
-                    // send next slice
-                    let mut data_slice: [u8; MASTER_PAYLOAD_MAX_SIZE] = [0; MASTER_PAYLOAD_MAX_SIZE];
-                    let meta = trace.get_slice_master(&mut data_slice);
-                    router.route(drtioaux::Packet::DmaAddTraceRequest {
-                        source: self_destination, destination: meta.destination, id: id,
-                        status: meta.status, length: meta.len, trace: data_slice
-                    }, routing_table, rank, self_destination);
-                }
-            }
-        }
-    }
-
-    // on subkernel request
-    pub fn playback(&mut self, id: u32, timestamp: u64, router: &mut Router, rank: u8, self_destination: u8, routing_table: &RoutingTable) {
-        // route all the playback requests
-        // remote traces + local trace
-        self.state = RemoteTraceState::Running(self.remote_traces.len() + 1);
-        for (dest, _) in self.remote_traces.iter() {
-            router.route(drtioaux::Packet::DmaPlaybackRequest {
-                source: self_destination, destination: *dest, id: id, timestamp: timestamp
-            }, routing_table, rank, self_destination);
-            // response will be ignored (succeeded = false handled by the main thread)
-        }
-    }
-
-    // on incoming Packet::DmaPlaybackDone
-    pub fn remote_finished(&mut self, kernel_manager: &mut KernelManager, error: u8, channel: u32, timestamp: u64) {
-        if let RemoteTraceState::Running(count) = self.state {
-            if error != 0 || count - 1 == 0 {
-                // notify the kernel about a DDMA error or finish
-                kernel_manager.ddma_finished(error, channel, timestamp);
-                self.state = RemoteTraceState::Ready;
-                // further messages will be ignored (if there was an error)
-            } else { // no error and not the last one awaited
-                self.state = RemoteTraceState::Running(count - 1);
-            }
-        }
-    }
-
-    pub fn erase(&mut self, id: u32, router: &mut Router, rank: u8, self_destination: u8, routing_table: &RoutingTable) {
-        for (dest, _) in self.remote_traces.iter() {
-            router.route(drtioaux::Packet::DmaRemoveTraceRequest {
-                source: self_destination, destination: *dest, id: id
-            }, routing_table, rank, self_destination);
-            // response will be ignored as this object will stop existing too
-        }
-    }
+    complete: bool
 }

+#[derive(Debug)]
 pub struct Manager {
-    entries: BTreeMap<(u8, u32), Entry>,
+    entries: BTreeMap<u32, Entry>,
     state: ManagerState,
-    current_id: u32,
-    current_source: u8,
-    previous_cri_master: RtioMaster,
-
-    remote_entries: BTreeMap<u32, RemoteTraces>,
-    name_map: BTreeMap<String, u32>,
-    recording_trace: Vec<u8>,
-    recording_name: String
+    currentid: u32
 }

 impl Manager {
@@ -196,197 +47,74 @@ impl Manager {
        }
        Manager {
            entries: BTreeMap::new(),
-            current_id: 0,
-            current_source: 0,
-            previous_cri_master: RtioMaster::Drtio,
+            currentid: 0,
            state: ManagerState::Idle,
-            remote_entries: BTreeMap::new(),
-            name_map: BTreeMap::new(),
-            recording_trace: Vec::new(),
-            recording_name: String::new(),
        }
    }

-    pub fn add(&mut self, source: u8, id: u32, status: PayloadStatus, trace: &[u8], trace_len: usize) -> Result<(), Error> {
+    pub fn add(&mut self, id: u32, status: PayloadStatus, trace: &[u8], trace_len: usize) -> Result<(), Error> {
        if status.is_first() {
-            self.entries.remove(&(source, id));
+            self.entries.remove(&id);
        }
-        let entry = match self.entries.get_mut(&(source, id)) {
+        let entry = match self.entries.get_mut(&id) {
            Some(entry) => {
                if entry.complete {
                    // replace entry
-                    self.entries.remove(&(source, id));
-                    self.entries.insert((source, id), Entry {
+                    self.entries.remove(&id);
+                    self.entries.insert(id, Entry {
                        trace: Vec::new(),
                        padding_len: 0,
-                        complete: false,
-                        duration: 0
-                    });
-                    self.entries.get_mut(&(source, id)).unwrap()
+                        complete: false });
+                    self.entries.get_mut(&id).unwrap()
                } else {
                    entry
                }
            },
            None => {
-                self.entries.insert((source, id), Entry {
+                self.entries.insert(id, Entry {
                    trace: Vec::new(),
                    padding_len: 0,
-                    complete: false,
-                    duration: 0,
-                });
-                self.entries.get_mut(&(source, id)).unwrap()
+                    complete: false });
+                self.entries.get_mut(&id).unwrap()
            },
        };
        entry.trace.extend(&trace[0..trace_len]);

        if status.is_last() {
-            entry.realign();
+            entry.trace.push(0);
+            let data_len = entry.trace.len();
+
+            // Realign.
+            entry.trace.reserve(ALIGNMENT - 1);
+            let padding = ALIGNMENT - entry.trace.as_ptr() as usize % ALIGNMENT;
+            let padding = if padding == ALIGNMENT { 0 } else { padding };
+            for _ in 0..padding {
+                // Vec guarantees that this will not reallocate
+                entry.trace.push(0)
+            }
+            for i in 1..data_len + 1 {
+                entry.trace[data_len + padding - i] = entry.trace[data_len - i]
+            }
+            entry.complete = true;
+            entry.padding_len = padding;
            flush_l2_cache();
        }
        Ok(())
    }
-    // API for subkernel
-    pub fn record_start(&mut self, name: &str) {
-        self.recording_name = String::from(name);
-        self.recording_trace = Vec::new();
-    }
-
-    // API for subkernel
-    pub fn record_append(&mut self, data: &[u8]) {
-        self.recording_trace.extend_from_slice(data);
-    }
-
-    // API for subkernel
-    pub fn record_stop(&mut self, duration: u64, self_destination: u8) -> Result<u32, Error> {
-        let mut trace = Vec::new();
-        mem::swap(&mut self.recording_trace, &mut trace);
-        trace.push(0);
-        let mut local_trace = Vec::new();
-        let mut remote_traces: BTreeMap<u8, Sliceable> = BTreeMap::new();
-        // analyze each entry and put in proper buckets, as the kernel core
-        // sends whole chunks, to limit comms/kernel CPU communication,
-        // and as only comms core has access to varios DMA buffers.
-        let mut ptr = 0;
-        while trace[ptr] != 0 {
-            // ptr + 3 = tgt >> 24 (destination)
-            let len = trace[ptr] as usize;
-            let destination = trace[ptr+3];
-            if destination == 0 {
-                return Err(Error::MasterDmaFound);
-            } else if destination == self_destination {
-                local_trace.extend(&trace[ptr..ptr+len]);
-            }
-            else {
-                if let Some(remote_trace) = remote_traces.get_mut(&destination) {
-                    remote_trace.extend(&trace[ptr..ptr+len]);
-                } else {
-                    remote_traces.insert(destination, Sliceable::new(destination, trace[ptr..ptr+len].to_vec()));
-                }
-            }
-            // and jump to the next event
-            ptr += len;
-        }
-        let local_entry = Entry::from_vec(local_trace, duration);
-        let id = local_entry.id();
-
-        self.entries.insert((self_destination, id), local_entry);
-        self.remote_entries.insert(id, RemoteTraces::new(remote_traces));
-        let mut name = String::new();
-        mem::swap(&mut self.recording_name, &mut name);
-        self.name_map.insert(name, id);
-
-        flush_l2_cache();
-
-        Ok(id)
-    }
-
-    pub fn upload_traces(&mut self, id: u32, router: &mut Router, rank: u8, self_destination: u8,
-        routing_table: &RoutingTable) -> Result<usize, Error> {
-        let remote_traces = self.remote_entries.get_mut(&id);
-        let mut len = 0;
-        if let Some(traces) = remote_traces {
-            len = traces.upload_traces(id, router, rank, self_destination, routing_table);
-        }
-        Ok(len)
-    }
-
-    pub fn with_trace<F, R>(&self, self_destination: u8, name: &str, f: F) -> R
-        where F: FnOnce(Option<&[u8]>, u64) -> R {
-        if let Some(ptr) = self.name_map.get(name) {
-            match self.entries.get(&(self_destination, *ptr)) {
-                Some(entry) => f(Some(&entry.trace[entry.padding_len..]), entry.duration),
-                None => f(None, 0)
-            }
-        } else {
-            f(None, 0)
-        }
-    }
-
-    // API for subkernel
-    pub fn playback_remote(&mut self, id: u32, timestamp: u64,
-        router: &mut Router, rank: u8, self_destination: u8, routing_table: &RoutingTable
-    ) -> Result<(), Error> {
-        if let Some(traces) = self.remote_entries.get_mut(&id) {
-            traces.playback(id, timestamp, router, rank, self_destination, routing_table);
-            Ok(())
-        } else {
-            Err(Error::IdNotFound)
-        }
-    }
-
-    // API for subkernel
-    pub fn erase_name(&mut self, name: &str, router: &mut Router, rank: u8, self_destination: u8, routing_table: &RoutingTable) {
-        if let Some(id) = self.name_map.get(name) {
-            if let Some(traces) = self.remote_entries.get_mut(&id) {
-                traces.erase(*id, router, rank, self_destination, routing_table);
-                self.remote_entries.remove(&id);
-            }
-            self.entries.remove(&(self_destination, *id));
-            self.name_map.remove(name);
-        }
-    }
-
-    // API for incoming DDMA (drtio)
-    pub fn erase(&mut self, source: u8, id: u32) -> Result<(), Error> {
-        match self.entries.remove(&(source, id)) {
+    pub fn erase(&mut self, id: u32) -> Result<(), Error> {
+        match self.entries.remove(&id) {
            Some(_) => Ok(()),
            None => Err(Error::IdNotFound)
        }
    }

-    pub fn remote_finished(&mut self, kernel_manager: &mut KernelManager,
-        id: u32, error: u8, channel: u32, timestamp: u64) {
-        if let Some(entry) = self.remote_entries.get_mut(&id) {
-            entry.remote_finished(kernel_manager, error, channel, timestamp);
-        }
-    }
-
-    pub fn ack_upload(&mut self, kernel_manager: &mut KernelManager, source: u8, id: u32, succeeded: bool,
-        router: &mut Router, rank: u8, self_destination: u8, routing_table: &RoutingTable) {
-        if let Some(entry) = self.remote_entries.get_mut(&id) {
-            entry.ack_upload(kernel_manager, source, id, succeeded, router, rank, self_destination, routing_table);
-        }
-    }
-
-    pub fn cleanup(&mut self, router: &mut Router, rank: u8, self_destination: u8, routing_table: &RoutingTable) {
-        // after subkernel ends, remove all self-generated traces
-        for (_, id) in self.name_map.iter_mut() {
-            if let Some(traces) = self.remote_entries.get_mut(&id) {
-                traces.erase(*id, router, rank, self_destination, routing_table);
-                self.remote_entries.remove(&id);
-            }
-            self.entries.remove(&(self_destination, *id));
-        }
-        self.name_map.clear();
-    }
-
-    // API for both incoming DDMA (drtio) and subkernel
-    pub fn playback(&mut self, source: u8, id: u32, timestamp: u64) -> Result<(), Error> {
+    pub fn playback(&mut self, id: u32, timestamp: u64) -> Result<(), Error> {
        if self.state != ManagerState::Idle {
            return Err(Error::PlaybackInProgress);
        }

-        let entry = match self.entries.get(&(source, id)){
+        let entry = match self.entries.get(&id){
            Some(entry) => entry,
            None => { return Err(Error::IdNotFound); }
        };
@@ -397,9 +125,7 @@ impl Manager {
        assert!(ptr as u32 % 64 == 0);

        self.state = ManagerState::Playback;
-        self.current_id = id;
-        self.current_source = source;
-        self.previous_cri_master = cricon_read();
+        self.currentid = id;

        unsafe {
            csr::rtio_dma::base_address_write(ptr as u64);
@@ -423,16 +149,15 @@ impl Manager {
        } else {
            self.state = ManagerState::Idle;
            unsafe {
-                cricon_select(self.previous_cri_master);
+                cricon_select(RtioMaster::Drtio);
                let error = csr::rtio_dma::error_read();
                let channel = csr::rtio_dma::error_channel_read();
                let timestamp = csr::rtio_dma::error_timestamp_read();
                if error != 0 {
                    csr::rtio_dma::error_write(1);
                }
                return Some(RtioStatus {
-                    source: self.current_source,
-                    id: self.current_id,
+                    id: self.currentid,
                    error: error,
                    channel: channel,
                    timestamp: timestamp });
@@ -1,11 +1,11 @@
-use core::{mem, option::NoneError};
-use alloc::{string::String, format, vec::Vec, collections::btree_map::BTreeMap};
+use core::{mem, option::NoneError, cmp::min};
+use alloc::{string::String, format, vec::Vec, collections::{btree_map::BTreeMap, vec_deque::VecDeque}};
 use cslice::AsCSlice;

-use board_artiq::{drtioaux, drtio_routing::RoutingTable, mailbox, spi};
+use board_artiq::{mailbox, spi};
 use board_misoc::{csr, clock, i2c};
 use proto_artiq::{
     drtioaux_proto::PayloadStatus,
     kernel_proto as kern,
     session_proto::Reply::KernelException as HostKernelException,
     rpc_proto as rpc};
@@ -15,8 +15,6 @@ use kernel::eh_artiq::StackPointerBacktrace;

 use ::{cricon_select, RtioMaster};
 use cache::Cache;
-use dma::{Manager as DmaManager, Error as DmaError};
-use routing::{Router, Sliceable, SliceMeta};
 use SAT_PAYLOAD_MAX_SIZE;
 use MASTER_PAYLOAD_MAX_SIZE;

@@ -63,12 +61,8 @@ enum KernelState {
    Absent,
    Loaded,
    Running,
-    MsgAwait { id: u32, max_time: i64, tags: Vec<u8> },
-    MsgSending,
-    SubkernelAwaitLoad,
-    SubkernelAwaitFinish { max_time: i64, id: u32 },
-    DmaUploading { max_time: u64 },
-    DmaAwait { max_time: u64 },
+    MsgAwait { max_time: u64, tags: Vec<u8> },
+    MsgSending
}

#[derive(Debug)]
@@ -80,9 +74,7 @@ pub enum Error {
    NoMessage,
    AwaitingMessage,
    SubkernelIoError,
-    DrtioError,
-    KernelException(Sliceable),
-    DmaError(DmaError),
+    KernelException(Sliceable)
}

impl From<NoneError> for Error {
@@ -97,25 +89,19 @@ impl From<io::Error<!>> for Error {
    }
}

-impl From<drtioaux::Error<!>> for Error {
-    fn from(_value: drtioaux::Error<!>) -> Error {
-        Error::DrtioError
-    }
-}
-
-impl From<DmaError> for Error {
-    fn from(value: DmaError) -> Error {
-        Error::DmaError(value)
-    }
-}
-
macro_rules! unexpected {
    ($($arg:tt)*) => (return Err(Error::Unexpected(format!($($arg)*))));
}

+/* represents data that has to be sent to Master */
+#[derive(Debug)]
+pub struct Sliceable {
+    it: usize,
+    data: Vec<u8>
+}
+
/* represents interkernel messages */
struct Message {
-    id: u32,
    count: u8,
    data: Vec<u8>
}
@@ -123,6 +109,7 @@ struct Message {
#[derive(PartialEq)]
enum OutMessageState {
    NoMessage,
+    MessageReady,
    MessageBeingSent,
    MessageSent,
    MessageAcknowledged
@@ -132,7 +119,7 @@ enum OutMessageState {
struct MessageManager {
    out_message: Option<Sliceable>,
    out_state: OutMessageState,
-    in_queue: Vec<Message>,
+    in_queue: VecDeque<Message>,
    in_buffer: Option<Message>,
}

@@ -141,9 +128,7 @@ struct Session {
    kernel_state: KernelState,
    log_buffer: String,
    last_exception: Option<Sliceable>,
-    source: u8, // which destination requested running the kernel
-    messages: MessageManager,
-    subkernels_finished: Vec<u32> // ids of subkernels finished
+    messages: MessageManager
}

#[derive(Debug)]
@@ -162,9 +147,42 @@ pub struct Manager {

pub struct SubkernelFinished {
    pub id: u32,
-    pub with_exception: bool,
-    pub exception_source: u8,
-    pub source: u8
+    pub with_exception: bool
+}
+
+pub struct SliceMeta {
+    pub len: u16,
+    pub status: PayloadStatus
+}
+
+macro_rules! get_slice_fn {
+    ( $name:tt, $size:expr ) => {
+        pub fn $name(&mut self, data_slice: &mut [u8; $size]) -> SliceMeta {
+            let first = self.it == 0;
+            let len = min($size, self.data.len() - self.it);
+            let last = self.it + len == self.data.len();
+            let status = PayloadStatus::from_status(first, last);
+            data_slice[..len].clone_from_slice(&self.data[self.it..self.it+len]);
+            self.it += len;
+
+            SliceMeta {
+                len: len as u16,
+                status: status
+            }
+        }
+    };
+}
+
+impl Sliceable {
+    pub fn new(data: Vec<u8>) -> Sliceable {
+        Sliceable {
+            it: 0,
+            data: data
+        }
+    }
+
+    get_slice_fn!(get_slice_sat, SAT_PAYLOAD_MAX_SIZE);
+    get_slice_fn!(get_slice_master, MASTER_PAYLOAD_MAX_SIZE);
}

impl MessageManager {
@@ -172,12 +190,12 @@ impl MessageManager {
        MessageManager {
            out_message: None,
            out_state: OutMessageState::NoMessage,
-            in_queue: Vec::new(),
+            in_queue: VecDeque::new(),
            in_buffer: None
        }
    }

-    pub fn handle_incoming(&mut self, status: PayloadStatus, length: usize, id: u32, data: &[u8; MASTER_PAYLOAD_MAX_SIZE]) {
+    pub fn handle_incoming(&mut self, status: PayloadStatus, length: usize, data: &[u8; MASTER_PAYLOAD_MAX_SIZE]) {
        // called when receiving a message from master
        if status.is_first() {
            // clear the buffer for first message
@@ -187,7 +205,6 @@ impl MessageManager {
            Some(message) => message.data.extend(&data[..length]),
            None => {
                self.in_buffer = Some(Message {
-                    id: id,
                    count: data[0],
                    data: data[1..length].to_vec()
                });
@@ -195,7 +212,18 @@ impl MessageManager {
        };
        if status.is_last() {
            // when done, remove from working queue
-            self.in_queue.push(self.in_buffer.take().unwrap());
+            self.in_queue.push_back(self.in_buffer.take().unwrap());
+        }
+    }
+
+    pub fn is_outgoing_ready(&mut self) -> bool {
+        // called by main loop, to see if there's anything to send, will send it afterwards
+        match self.out_state {
+            OutMessageState::MessageReady => {
+                self.out_state = OutMessageState::MessageBeingSent;
+                true
+            },
+            _ => false
        }
    }

@ -238,42 +266,19 @@ impl MessageManager {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn accept_outgoing(&mut self, id: u32, self_destination: u8, destination: u8,
|
pub fn accept_outgoing(&mut self, count: u8, tag: &[u8], data: *const *const ()) -> Result<(), Error> {
|
||||||
count: u8, tag: &[u8], data: *const *const (),
|
|
||||||
routing_table: &RoutingTable, rank: u8, router: &mut Router
|
|
||||||
) -> Result<(), Error> {
|
|
||||||
let mut writer = Cursor::new(Vec::new());
|
let mut writer = Cursor::new(Vec::new());
|
||||||
rpc::send_args(&mut writer, 0, tag, data, false)?;
|
rpc::send_args(&mut writer, 0, tag, data, false)?;
|
||||||
// skip service tag, but write the count
|
// skip service tag, but write the count
|
||||||
let mut data = writer.into_inner().split_off(3);
|
let mut data = writer.into_inner().split_off(3);
|
||||||
data[0] = count;
|
data[0] = count;
|
||||||
self.out_message = Some(Sliceable::new(destination, data));
|
self.out_message = Some(Sliceable::new(data));
|
||||||
|
self.out_state = OutMessageState::MessageReady;
|
||||||
let mut data_slice: [u8; MASTER_PAYLOAD_MAX_SIZE] = [0; MASTER_PAYLOAD_MAX_SIZE];
|
|
||||||
self.out_state = OutMessageState::MessageBeingSent;
|
|
||||||
let meta = self.get_outgoing_slice(&mut data_slice).unwrap();
|
|
||||||
router.route(drtioaux::Packet::SubkernelMessage {
|
|
||||||
source: self_destination, destination: destination, id: id,
|
|
||||||
status: meta.status, length: meta.len as u16, data: data_slice
|
|
||||||
}, routing_table, rank, self_destination);
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn get_incoming(&mut self, id: u32) -> Option<Message> {
|
pub fn get_incoming(&mut self) -> Option<Message> {
|
||||||
for i in 0..self.in_queue.len() {
|
self.in_queue.pop_front()
|
||||||
if self.in_queue[i].id == id {
|
|
||||||
return Some(self.in_queue.remove(i));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
None
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn pending_ids(&self) -> Vec<u32> {
|
|
||||||
let mut pending_ids: Vec<u32> = Vec::new();
|
|
||||||
for msg in self.in_queue.iter() {
|
|
||||||
pending_ids.push(msg.id);
|
|
||||||
}
|
|
||||||
pending_ids
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -283,16 +288,15 @@ impl Session {
|
||||||
kernel_state: KernelState::Absent,
|
kernel_state: KernelState::Absent,
|
||||||
log_buffer: String::new(),
|
log_buffer: String::new(),
|
||||||
last_exception: None,
|
last_exception: None,
|
||||||
source: 0,
|
messages: MessageManager::new()
|
||||||
messages: MessageManager::new(),
|
|
||||||
subkernels_finished: Vec::new()
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn running(&self) -> bool {
|
fn running(&self) -> bool {
|
||||||
match self.kernel_state {
|
match self.kernel_state {
|
||||||
KernelState::Absent | KernelState::Loaded => false,
|
KernelState::Absent | KernelState::Loaded => false,
|
||||||
_ => true
|
KernelState::Running | KernelState::MsgAwait { .. } |
|
||||||
|
KernelState::MsgSending => true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -365,24 +369,23 @@ impl Manager {
|
||||||
        unsafe { self.cache.unborrow() }
    }

-   pub fn run(&mut self, source: u8, id: u32) -> Result<(), Error> {
+   pub fn run(&mut self, id: u32) -> Result<(), Error> {
        info!("starting subkernel #{}", id);
        if self.session.kernel_state != KernelState::Loaded
            || self.current_id != id {
            self.load(id)?;
        }
-       self.session.source = source;
        self.session.kernel_state = KernelState::Running;
        cricon_select(RtioMaster::Kernel);

        kern_acknowledge()
    }

-   pub fn message_handle_incoming(&mut self, status: PayloadStatus, length: usize, id: u32, slice: &[u8; MASTER_PAYLOAD_MAX_SIZE]) {
+   pub fn message_handle_incoming(&mut self, status: PayloadStatus, length: usize, slice: &[u8; MASTER_PAYLOAD_MAX_SIZE]) {
        if !self.is_running() {
            return;
        }
-       self.session.messages.handle_incoming(status, length, id, slice);
+       self.session.messages.handle_incoming(status, length, slice);
    }

    pub fn message_get_slice(&mut self, slice: &mut [u8; MASTER_PAYLOAD_MAX_SIZE]) -> Option<SliceMeta> {

@@ -400,6 +403,14 @@ impl Manager {
        self.session.messages.ack_slice()
    }

+   pub fn message_is_ready(&mut self) -> bool {
+       self.session.messages.is_outgoing_ready()
+   }
+
+   pub fn get_last_finished(&mut self) -> Option<SubkernelFinished> {
+       self.last_finished.take()
+   }
+
    pub fn load(&mut self, id: u32) -> Result<(), Error> {
        if self.current_id == id && self.session.kernel_state == KernelState::Loaded {
            return Ok(())

@@ -423,7 +434,6 @@ impl Manager {
            }
            kern::LoadReply(Err(error)) => {
                kernel_cpu::stop();
-               error!("load error: {:?}", error);
                Err(Error::Load(format!("{}", error)))
            }
            other => {

@@ -437,7 +447,7 @@ impl Manager {
    pub fn exception_get_slice(&mut self, data_slice: &mut [u8; SAT_PAYLOAD_MAX_SIZE]) -> SliceMeta {
        match self.session.last_exception.as_mut() {
            Some(exception) => exception.get_slice_sat(data_slice),
-           None => SliceMeta { destination: 0, len: 0, status: PayloadStatus::FirstAndLast }
+           None => SliceMeta { len: 0, status: PayloadStatus::FirstAndLast }
        }
    }

@@ -462,63 +472,12 @@ impl Manager {
            backtrace: &[],
            async_errors: 0
        }).write_to(&mut writer) {
-           Ok(_) => self.session.last_exception = Some(Sliceable::new(0, writer.into_inner())),
+           Ok(_) => self.session.last_exception = Some(Sliceable::new(writer.into_inner())),
            Err(_) => error!("Error writing exception data")
        }
    }

-   pub fn ddma_finished(&mut self, error: u8, channel: u32, timestamp: u64) {
-       if let KernelState::DmaAwait { .. } = self.session.kernel_state {
-           kern_send(&kern::DmaAwaitRemoteReply {
-               timeout: false, error: error, channel: channel, timestamp: timestamp
-           }).unwrap();
-           self.session.kernel_state = KernelState::Running;
-       }
-   }
-
-   pub fn ddma_nack(&mut self) {
-       // for simplicity treat it as a timeout for now...
-       if let KernelState::DmaAwait { .. } = self.session.kernel_state {
-           kern_send(&kern::DmaAwaitRemoteReply {
-               timeout: true, error: 0, channel: 0, timestamp: 0
-           }).unwrap();
-           self.session.kernel_state = KernelState::Running;
-       }
-   }
-
-   pub fn ddma_remote_uploaded(&mut self, succeeded: bool) {
-       if let KernelState::DmaUploading { .. } = self.session.kernel_state {
-           if succeeded {
-               self.session.kernel_state = KernelState::Running;
-               kern_acknowledge().unwrap();
-           } else {
-               self.stop();
-               self.runtime_exception(Error::DmaError(DmaError::UploadFail));
-           }
-       }
-   }
-
-   pub fn process_kern_requests(&mut self, router: &mut Router, routing_table: &RoutingTable, rank: u8, destination: u8, dma_manager: &mut DmaManager) {
-       macro_rules! finished {
-           ($with_exception:expr) => {{ Some(SubkernelFinished {
-               source: self.session.source, id: self.current_id,
-               with_exception: $with_exception, exception_source: destination
-           }) }}
-       }
-
-       if let Some(subkernel_finished) = self.last_finished.take() {
-           info!("subkernel {} finished, with exception: {}", subkernel_finished.id, subkernel_finished.with_exception);
-           let pending = self.session.messages.pending_ids();
-           if pending.len() > 0 {
-               warn!("subkernel terminated with messages still pending: {:?}", pending);
-           }
-           router.route(drtioaux::Packet::SubkernelFinished {
-               destination: subkernel_finished.source, id: subkernel_finished.id,
-               with_exception: subkernel_finished.with_exception, exception_src: subkernel_finished.exception_source
-           }, &routing_table, rank, destination);
-           dma_manager.cleanup(router, rank, destination, routing_table);
-       }
-
+   pub fn process_kern_requests(&mut self, rank: u8) {
        if !self.is_running() {
            return;
        }

@@ -531,39 +490,39 @@ impl Manager {
                    self.session.kernel_state = KernelState::Absent;
                    unsafe { self.cache.unborrow() }
                    self.session.last_exception = Some(exception);
-                   self.last_finished = finished!(true);
+                   self.last_finished = Some(SubkernelFinished { id: self.current_id, with_exception: true })
                },
                Err(e) => {
                    error!("Error while running processing external messages: {:?}", e);
                    self.stop();
                    self.runtime_exception(e);
-                   self.last_finished = finished!(true);
+                   self.last_finished = Some(SubkernelFinished { id: self.current_id, with_exception: true })
                }
            }

-       match self.process_kern_message(router, routing_table, rank, destination, dma_manager) {
+       match self.process_kern_message(rank) {
            Ok(Some(with_exception)) => {
-               self.last_finished = finished!(with_exception)
+               self.last_finished = Some(SubkernelFinished { id: self.current_id, with_exception: with_exception })
            },
            Ok(None) | Err(Error::NoMessage) => (),
            Err(e) => {
                error!("Error while running kernel: {:?}", e);
                self.stop();
                self.runtime_exception(e);
-               self.last_finished = finished!(true);
+               self.last_finished = Some(SubkernelFinished { id: self.current_id, with_exception: true })
            }
        }
    }

    fn process_external_messages(&mut self) -> Result<(), Error> {
        match &self.session.kernel_state {
-           KernelState::MsgAwait { id, max_time, tags } => {
-               if *max_time > 0 && clock::get_ms() > *max_time as u64 {
+           KernelState::MsgAwait { max_time, tags } => {
+               if clock::get_ms() > *max_time {
                    kern_send(&kern::SubkernelMsgRecvReply { status: kern::SubkernelStatus::Timeout, count: 0 })?;
                    self.session.kernel_state = KernelState::Running;
                    return Ok(())
                }
-               if let Some(message) = self.session.messages.get_incoming(*id) {
+               if let Some(message) = self.session.messages.get_incoming() {
                    kern_send(&kern::SubkernelMsgRecvReply { status: kern::SubkernelStatus::NoError, count: message.count })?;
                    let tags = tags.clone();
                    self.session.kernel_state = KernelState::Running;

@@ -580,88 +539,16 @@ impl Manager {
Err(Error::AwaitingMessage)
|
Err(Error::AwaitingMessage)
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
KernelState::SubkernelAwaitFinish { max_time, id } => {
|
|
||||||
if *max_time > 0 && clock::get_ms() > *max_time as u64 {
|
|
||||||
kern_send(&kern::SubkernelAwaitFinishReply { status: kern::SubkernelStatus::Timeout })?;
|
|
||||||
self.session.kernel_state = KernelState::Running;
|
|
||||||
} else {
|
|
||||||
let mut i = 0;
|
|
||||||
for status in &self.session.subkernels_finished {
|
|
||||||
if *status == *id {
|
|
||||||
kern_send(&kern::SubkernelAwaitFinishReply { status: kern::SubkernelStatus::NoError })?;
|
|
||||||
self.session.kernel_state = KernelState::Running;
|
|
||||||
self.session.subkernels_finished.swap_remove(i);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
i += 1;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
KernelState::DmaAwait { max_time } => {
|
|
||||||
if clock::get_ms() > *max_time {
|
|
||||||
kern_send(&kern::DmaAwaitRemoteReply { timeout: true, error: 0, channel: 0, timestamp: 0 })?;
|
|
||||||
self.session.kernel_state = KernelState::Running;
|
|
||||||
}
|
|
||||||
// ddma_finished() and nack() covers the other case
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
KernelState::DmaUploading { max_time } => {
|
|
||||||
if clock::get_ms() > *max_time {
|
|
||||||
unexpected!("DMAError: Timed out sending traces to remote");
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
_ => Ok(())
|
_ => Ok(())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn subkernel_load_run_reply(&mut self, succeeded: bool, self_destination: u8) {
|
fn process_kern_message(&mut self, rank: u8) -> Result<Option<bool>, Error> {
|
||||||
if self.session.kernel_state == KernelState::SubkernelAwaitLoad {
|
|
||||||
if let Err(e) = kern_send(&kern::SubkernelLoadRunReply { succeeded: succeeded }) {
|
|
||||||
self.stop();
|
|
||||||
self.runtime_exception(e);
|
|
||||||
self.last_finished = Some(SubkernelFinished {
|
|
||||||
source: self.session.source, id: self.current_id,
|
|
||||||
with_exception: true, exception_source: self_destination
|
|
||||||
})
|
|
||||||
} else {
|
|
||||||
self.session.kernel_state = KernelState::Running;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
warn!("received unsolicited SubkernelLoadRunReply");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn remote_subkernel_finished(&mut self, id: u32, with_exception: bool, exception_source: u8) {
|
|
||||||
if with_exception {
|
|
||||||
unsafe { kernel_cpu::stop() }
|
|
||||||
self.session.kernel_state = KernelState::Absent;
|
|
||||||
unsafe { self.cache.unborrow() }
|
|
||||||
self.last_finished = Some(SubkernelFinished {
|
|
||||||
source: self.session.source, id: self.current_id,
|
|
||||||
with_exception: true, exception_source: exception_source
|
|
||||||
})
|
|
||||||
} else {
|
|
||||||
self.session.subkernels_finished.push(id);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn process_kern_message(&mut self, router: &mut Router,
|
|
||||||
routing_table: &RoutingTable,
|
|
||||||
rank: u8, destination: u8,
|
|
||||||
dma_manager: &mut DmaManager
|
|
||||||
) -> Result<Option<bool>, Error> {
|
|
||||||
// returns Ok(with_exception) on finish
|
// returns Ok(with_exception) on finish
|
||||||
// None if the kernel is still running
|
// None if the kernel is still running
|
||||||
kern_recv(|request| {
|
kern_recv(|request| {
|
||||||
match (request, &self.session.kernel_state) {
|
match (request, &self.session.kernel_state) {
|
||||||
(&kern::LoadReply(_), KernelState::Loaded) |
|
(&kern::LoadReply(_), KernelState::Loaded) => {
|
||||||
(_, KernelState::DmaUploading { .. }) |
|
|
||||||
(_, KernelState::DmaAwait { .. }) |
|
|
||||||
(_, KernelState::MsgSending) |
|
|
||||||
(_, KernelState::SubkernelAwaitLoad) |
|
|
||||||
(_, KernelState::SubkernelAwaitFinish { .. }) => {
|
|
||||||
// We're standing by; ignore the message.
|
// We're standing by; ignore the message.
|
||||||
return Ok(None)
|
return Ok(None)
|
||||||
}
|
}
|
||||||
|
@ -672,7 +559,7 @@ impl Manager {
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
if process_kern_hwreq(request, destination)? {
|
if process_kern_hwreq(request, rank)? {
|
||||||
return Ok(None)
|
return Ok(None)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -726,93 +613,19 @@ impl Manager {
|
||||||
return Ok(Some(true))
|
return Ok(Some(true))
|
||||||
}
|
}
|
||||||
|
|
||||||
&kern::DmaRecordStart(name) => {
|
&kern::SubkernelMsgSend { id: _, count, tag, data } => {
|
||||||
dma_manager.record_start(name);
|
self.session.messages.accept_outgoing(count, tag, data)?;
|
||||||
kern_acknowledge()
|
|
||||||
}
|
|
||||||
&kern::DmaRecordAppend(data) => {
|
|
||||||
dma_manager.record_append(data);
|
|
||||||
kern_acknowledge()
|
|
||||||
}
|
|
||||||
&kern::DmaRecordStop { duration, enable_ddma: _ } => {
|
|
||||||
// ddma is always used on satellites
|
|
||||||
if let Ok(id) = dma_manager.record_stop(duration, destination) {
|
|
||||||
let remote_count = dma_manager.upload_traces(id, router, rank, destination, routing_table)?;
|
|
||||||
if remote_count > 0 {
|
|
||||||
let max_time = clock::get_ms() + 10_000 as u64;
|
|
||||||
self.session.kernel_state = KernelState::DmaUploading { max_time: max_time };
|
|
||||||
Ok(())
|
|
||||||
} else {
|
|
||||||
kern_acknowledge()
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
unexpected!("DMAError: found an unsupported call to RTIO devices on master")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
&kern::DmaEraseRequest { name } => {
|
|
||||||
dma_manager.erase_name(name, router, rank, destination, routing_table);
|
|
||||||
kern_acknowledge()
|
|
||||||
}
|
|
||||||
&kern::DmaRetrieveRequest { name } => {
|
|
||||||
dma_manager.with_trace(destination, name, |trace, duration| {
|
|
||||||
kern_send(&kern::DmaRetrieveReply {
|
|
||||||
trace: trace,
|
|
||||||
duration: duration,
|
|
||||||
uses_ddma: true,
|
|
||||||
})
|
|
||||||
})
|
|
||||||
}
|
|
||||||
&kern::DmaStartRemoteRequest { id, timestamp } => {
|
|
||||||
let max_time = clock::get_ms() + 10_000 as u64;
|
|
||||||
self.session.kernel_state = KernelState::DmaAwait { max_time: max_time };
|
|
||||||
dma_manager.playback_remote(id as u32, timestamp as u64, router, rank, destination, routing_table)?;
|
|
||||||
dma_manager.playback(destination, id as u32, timestamp as u64)?;
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
&kern::SubkernelMsgSend { id, destination: msg_dest, count, tag, data } => {
|
|
||||||
let message_destination;
|
|
||||||
let message_id;
|
|
||||||
if let Some(dest) = msg_dest {
|
|
||||||
message_destination = dest;
|
|
||||||
message_id = id;
|
|
||||||
} else {
|
|
||||||
// return message, return to source
|
|
||||||
message_destination = self.session.source;
|
|
||||||
message_id = self.current_id;
|
|
||||||
}
|
|
||||||
self.session.messages.accept_outgoing(message_id, destination,
|
|
||||||
message_destination, count, tag, data,
|
|
||||||
routing_table, rank, router)?;
|
|
||||||
// acknowledge after the message is sent
|
// acknowledge after the message is sent
|
||||||
self.session.kernel_state = KernelState::MsgSending;
|
self.session.kernel_state = KernelState::MsgSending;
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
&kern::SubkernelMsgRecvRequest { id, timeout, tags } => {
|
&kern::SubkernelMsgRecvRequest { id: _, timeout, tags } => {
|
||||||
// negative timeout value means no timeout
|
let max_time = clock::get_ms() + timeout as u64;
|
||||||
let max_time = if timeout > 0 { clock::get_ms() as i64 + timeout } else { timeout };
|
self.session.kernel_state = KernelState::MsgAwait { max_time: max_time, tags: tags.to_vec() };
|
||||||
// ID equal to -1 indicates wildcard for receiving arguments
|
|
||||||
let id = if id == -1 { self.current_id } else { id as u32 };
|
|
||||||
self.session.kernel_state = KernelState::MsgAwait {
|
|
||||||
id: id, max_time: max_time, tags: tags.to_vec() };
|
|
||||||
Ok(())
|
Ok(())
|
||||||
},
|
},
|
||||||
|
|
||||||
&kern::SubkernelLoadRunRequest { id, destination: sk_destination, run } => {
|
|
||||||
self.session.kernel_state = KernelState::SubkernelAwaitLoad;
|
|
||||||
router.route(drtioaux::Packet::SubkernelLoadRunRequest {
|
|
||||||
source: destination, destination: sk_destination, id: id, run: run
|
|
||||||
}, routing_table, rank, destination);
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
&kern::SubkernelAwaitFinishRequest{ id, timeout } => {
|
|
||||||
let max_time = if timeout > 0 { clock::get_ms() as i64 + timeout } else { timeout };
|
|
||||||
self.session.kernel_state = KernelState::SubkernelAwaitFinish { max_time: max_time, id: id };
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
request => unexpected!("unexpected request {:?} from kernel CPU", request)
|
request => unexpected!("unexpected request {:?} from kernel CPU", request)
|
||||||
}.and(Ok(None))
|
}.and(Ok(None))
|
||||||
})
|
})
|
||||||
|
@ -886,7 +699,7 @@ fn slice_kernel_exception(exceptions: &[Option<eh_artiq::Exception>],
|
||||||
async_errors: 0
|
async_errors: 0
|
||||||
}).write_to(&mut writer) {
|
}).write_to(&mut writer) {
|
||||||
// save last exception data to be received by master
|
// save last exception data to be received by master
|
||||||
Ok(_) => Ok(Sliceable::new(0, writer.into_inner())),
|
Ok(_) => Ok(Sliceable::new(writer.into_inner())),
|
||||||
Err(_) => Err(Error::SubkernelIoError)
|
Err(_) => Err(Error::SubkernelIoError)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -946,7 +759,7 @@ fn pass_message_to_kernel(message: &Message, tags: &[u8]) -> Result<(), Error> {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn process_kern_hwreq(request: &kern::Message, self_destination: u8) -> Result<bool, Error> {
|
fn process_kern_hwreq(request: &kern::Message, rank: u8) -> Result<bool, Error> {
|
||||||
match request {
|
match request {
|
||||||
&kern::RtioInitRequest => {
|
&kern::RtioInitRequest => {
|
||||||
unsafe {
|
unsafe {
|
||||||
|
@ -961,7 +774,7 @@ fn process_kern_hwreq(request: &kern::Message, self_destination: u8) -> Result<b
|
||||||
// only local destination is considered "up"
|
// only local destination is considered "up"
|
||||||
// no access to other DRTIO destinations
|
// no access to other DRTIO destinations
|
||||||
kern_send(&kern::RtioDestinationStatusReply {
|
kern_send(&kern::RtioDestinationStatusReply {
|
||||||
up: destination == self_destination })
|
up: destination == rank })
|
||||||
}
|
}
|
||||||
|
|
||||||
&kern::I2cStartRequest { busno } => {
|
&kern::I2cStartRequest { busno } => {
|
||||||
|
|
|
@ -32,7 +32,6 @@ use analyzer::Analyzer;
|
||||||
static mut ALLOC: alloc_list::ListAlloc = alloc_list::EMPTY;
|
static mut ALLOC: alloc_list::ListAlloc = alloc_list::EMPTY;
|
||||||
|
|
||||||
mod repeater;
|
mod repeater;
|
||||||
mod routing;
|
|
||||||
mod dma;
|
mod dma;
|
||||||
mod analyzer;
|
mod analyzer;
|
||||||
mod kernel;
|
mod kernel;
|
||||||
|
@ -66,13 +65,6 @@ fn drtiosat_tsc_loaded() -> bool {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn drtiosat_async_ready() {
|
|
||||||
unsafe {
|
|
||||||
csr::drtiosat::async_messages_ready_write(1);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone, Copy)]
|
|
||||||
pub enum RtioMaster {
|
pub enum RtioMaster {
|
||||||
Drtio,
|
Drtio,
|
||||||
Dma,
|
Dma,
|
||||||
|
@ -90,16 +82,6 @@ pub fn cricon_select(master: RtioMaster) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn cricon_read() -> RtioMaster {
|
|
||||||
let val = unsafe { csr::cri_con::selected_read() };
|
|
||||||
match val {
|
|
||||||
0 => RtioMaster::Drtio,
|
|
||||||
1 => RtioMaster::Dma,
|
|
||||||
2 => RtioMaster::Kernel,
|
|
||||||
_ => unreachable!()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(has_drtio_routing)]
|
#[cfg(has_drtio_routing)]
|
||||||
macro_rules! forward {
|
macro_rules! forward {
|
||||||
($routing_table:expr, $destination:expr, $rank:expr, $repeaters:expr, $packet:expr) => {{
|
($routing_table:expr, $destination:expr, $rank:expr, $repeaters:expr, $packet:expr) => {{
|
||||||
|
@ -107,14 +89,7 @@ macro_rules! forward {
|
||||||
if hop != 0 {
|
if hop != 0 {
|
||||||
let repno = (hop - 1) as usize;
|
let repno = (hop - 1) as usize;
|
||||||
if repno < $repeaters.len() {
|
if repno < $repeaters.len() {
|
||||||
if $packet.expects_response() {
|
return $repeaters[repno].aux_forward($packet);
|
||||||
return $repeaters[repno].aux_forward($packet);
|
|
||||||
} else {
|
|
||||||
let res = $repeaters[repno].aux_send($packet);
|
|
||||||
// allow the satellite to parse the packet before next
|
|
||||||
clock::spin_us(10_000);
|
|
||||||
return res;
|
|
||||||
}
|
|
||||||
} else {
|
} else {
|
||||||
return Err(drtioaux::Error::RoutingError);
|
return Err(drtioaux::Error::RoutingError);
|
||||||
}
|
}
|
||||||
|
@ -128,9 +103,8 @@ macro_rules! forward {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn process_aux_packet(dmamgr: &mut DmaManager, analyzer: &mut Analyzer, kernelmgr: &mut KernelManager,
|
fn process_aux_packet(dmamgr: &mut DmaManager, analyzer: &mut Analyzer, kernelmgr: &mut KernelManager,
|
||||||
_repeaters: &mut [repeater::Repeater], _routing_table: &mut drtio_routing::RoutingTable, rank: &mut u8,
|
_repeaters: &mut [repeater::Repeater], _routing_table: &mut drtio_routing::RoutingTable, _rank: &mut u8,
|
||||||
router: &mut routing::Router, self_destination: &mut u8, packet: drtioaux::Packet
|
packet: drtioaux::Packet) -> Result<(), drtioaux::Error<!>> {
|
||||||
) -> Result<(), drtioaux::Error<!>> {
|
|
||||||
// In the code below, *_chan_sel_write takes an u8 if there are fewer than 256 channels,
|
// In the code below, *_chan_sel_write takes an u8 if there are fewer than 256 channels,
|
||||||
// and u16 otherwise; hence the `as _` conversion.
|
// and u16 otherwise; hence the `as _` conversion.
|
||||||
match packet {
|
match packet {
|
||||||
|
@ -151,43 +125,61 @@ fn process_aux_packet(dmamgr: &mut DmaManager, analyzer: &mut Analyzer, kernelmg
|
||||||
|
|
||||||
drtioaux::Packet::DestinationStatusRequest { destination } => {
|
drtioaux::Packet::DestinationStatusRequest { destination } => {
|
||||||
#[cfg(has_drtio_routing)]
|
#[cfg(has_drtio_routing)]
|
||||||
let hop = _routing_table.0[destination as usize][*rank as usize];
|
let hop = _routing_table.0[destination as usize][*_rank as usize];
|
||||||
#[cfg(not(has_drtio_routing))]
|
#[cfg(not(has_drtio_routing))]
|
||||||
let hop = 0;
|
let hop = 0;
|
||||||
|
|
||||||
if hop == 0 {
|
if hop == 0 {
|
||||||
*self_destination = destination;
|
// async messages
|
||||||
let errors;
|
if let Some(status) = dmamgr.get_status() {
|
||||||
unsafe {
|
info!("playback done, error: {}, channel: {}, timestamp: {}", status.error, status.channel, status.timestamp);
|
||||||
errors = csr::drtiosat::rtio_error_read();
|
drtioaux::send(0, &drtioaux::Packet::DmaPlaybackStatus {
|
||||||
}
|
destination: destination, id: status.id, error: status.error, channel: status.channel, timestamp: status.timestamp })?;
|
||||||
if errors & 1 != 0 {
|
} else if let Some(subkernel_finished) = kernelmgr.get_last_finished() {
|
||||||
let channel;
|
info!("subkernel {} finished, with exception: {}", subkernel_finished.id, subkernel_finished.with_exception);
|
||||||
|
drtioaux::send(0, &drtioaux::Packet::SubkernelFinished {
|
||||||
|
id: subkernel_finished.id, with_exception: subkernel_finished.with_exception
|
||||||
|
})?;
|
||||||
|
} else if kernelmgr.message_is_ready() {
|
||||||
|
let mut data_slice: [u8; MASTER_PAYLOAD_MAX_SIZE] = [0; MASTER_PAYLOAD_MAX_SIZE];
|
||||||
|
let meta = kernelmgr.message_get_slice(&mut data_slice).unwrap();
|
||||||
|
drtioaux::send(0, &drtioaux::Packet::SubkernelMessage {
|
||||||
|
destination: destination, id: kernelmgr.get_current_id().unwrap(),
|
||||||
|
status: meta.status, length: meta.len as u16, data: data_slice
|
||||||
|
})?;
|
||||||
|
} else {
|
||||||
|
let errors;
|
||||||
unsafe {
|
unsafe {
|
||||||
channel = csr::drtiosat::sequence_error_channel_read();
|
errors = csr::drtiosat::rtio_error_read();
|
||||||
csr::drtiosat::rtio_error_write(1);
|
|
||||||
}
|
}
|
||||||
drtioaux::send(0,
|
if errors & 1 != 0 {
|
||||||
&drtioaux::Packet::DestinationSequenceErrorReply { channel })?;
|
let channel;
|
||||||
} else if errors & 2 != 0 {
|
unsafe {
|
||||||
let channel;
|
channel = csr::drtiosat::sequence_error_channel_read();
|
||||||
unsafe {
|
csr::drtiosat::rtio_error_write(1);
|
||||||
channel = csr::drtiosat::collision_channel_read();
|
}
|
||||||
csr::drtiosat::rtio_error_write(2);
|
drtioaux::send(0,
|
||||||
|
&drtioaux::Packet::DestinationSequenceErrorReply { channel })?;
|
||||||
|
} else if errors & 2 != 0 {
|
||||||
|
let channel;
|
||||||
|
unsafe {
|
||||||
|
channel = csr::drtiosat::collision_channel_read();
|
||||||
|
csr::drtiosat::rtio_error_write(2);
|
||||||
|
}
|
||||||
|
drtioaux::send(0,
|
||||||
|
&drtioaux::Packet::DestinationCollisionReply { channel })?;
|
||||||
|
} else if errors & 4 != 0 {
|
||||||
|
let channel;
|
||||||
|
unsafe {
|
||||||
|
channel = csr::drtiosat::busy_channel_read();
|
||||||
|
csr::drtiosat::rtio_error_write(4);
|
||||||
|
}
|
||||||
|
drtioaux::send(0,
|
||||||
|
&drtioaux::Packet::DestinationBusyReply { channel })?;
|
||||||
}
|
}
|
||||||
drtioaux::send(0,
|
else {
|
||||||
&drtioaux::Packet::DestinationCollisionReply { channel })?;
|
drtioaux::send(0, &drtioaux::Packet::DestinationOkReply)?;
|
||||||
} else if errors & 4 != 0 {
|
|
||||||
let channel;
|
|
||||||
unsafe {
|
|
||||||
channel = csr::drtiosat::busy_channel_read();
|
|
||||||
csr::drtiosat::rtio_error_write(4);
|
|
||||||
}
|
}
|
||||||
drtioaux::send(0,
|
|
||||||
&drtioaux::Packet::DestinationBusyReply { channel })?;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
drtioaux::send(0, &drtioaux::Packet::DestinationOkReply)?;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -212,6 +204,7 @@ fn process_aux_packet(dmamgr: &mut DmaManager, analyzer: &mut Analyzer, kernelmg
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -226,18 +219,18 @@ fn process_aux_packet(dmamgr: &mut DmaManager, analyzer: &mut Analyzer, kernelmg
|
||||||
drtioaux::send(0, &drtioaux::Packet::RoutingAck)
|
drtioaux::send(0, &drtioaux::Packet::RoutingAck)
|
||||||
}
|
}
|
||||||
#[cfg(has_drtio_routing)]
|
#[cfg(has_drtio_routing)]
|
||||||
drtioaux::Packet::RoutingSetRank { rank: new_rank } => {
|
drtioaux::Packet::RoutingSetRank { rank } => {
|
||||||
*rank = new_rank;
|
*_rank = rank;
|
||||||
drtio_routing::interconnect_enable_all(_routing_table, new_rank);
|
drtio_routing::interconnect_enable_all(_routing_table, rank);
|
||||||
|
|
||||||
let rep_rank = new_rank + 1;
|
let rep_rank = rank + 1;
|
||||||
for rep in _repeaters.iter() {
|
for rep in _repeaters.iter() {
|
||||||
if let Err(e) = rep.set_rank(rep_rank) {
|
if let Err(e) = rep.set_rank(rep_rank) {
|
||||||
error!("failed to set rank ({})", e);
|
error!("failed to set rank ({})", e);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
info!("rank: {}", new_rank);
|
info!("rank: {}", rank);
|
||||||
info!("routing table: {}", _routing_table);
|
info!("routing table: {}", _routing_table);
|
||||||
|
|
||||||
drtioaux::send(0, &drtioaux::Packet::RoutingAck)
|
drtioaux::send(0, &drtioaux::Packet::RoutingAck)
|
||||||
|
@ -252,14 +245,8 @@ fn process_aux_packet(dmamgr: &mut DmaManager, analyzer: &mut Analyzer, kernelmg
|
||||||
drtioaux::send(0, &drtioaux::Packet::RoutingAck)
|
drtioaux::send(0, &drtioaux::Packet::RoutingAck)
|
||||||
}
|
}
|
||||||
|
|
||||||
drtioaux::Packet::RoutingRetrievePackets => {
|
|
||||||
let packet = router.get_upstream_packet().or(
|
|
||||||
Some(drtioaux::Packet::RoutingNoPackets)).unwrap();
|
|
||||||
drtioaux::send(0, &packet)
|
|
||||||
}
|
|
||||||
|
|
||||||
drtioaux::Packet::MonitorRequest { destination: _destination, channel, probe } => {
|
drtioaux::Packet::MonitorRequest { destination: _destination, channel, probe } => {
|
||||||
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
|
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
|
||||||
let value;
|
let value;
|
||||||
#[cfg(has_rtio_moninj)]
|
#[cfg(has_rtio_moninj)]
|
||||||
unsafe {
|
unsafe {
|
||||||
|
@ -276,7 +263,7 @@ fn process_aux_packet(dmamgr: &mut DmaManager, analyzer: &mut Analyzer, kernelmg
|
||||||
drtioaux::send(0, &reply)
|
drtioaux::send(0, &reply)
|
||||||
},
|
},
|
||||||
drtioaux::Packet::InjectionRequest { destination: _destination, channel, overrd, value } => {
|
drtioaux::Packet::InjectionRequest { destination: _destination, channel, overrd, value } => {
|
||||||
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
|
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
|
||||||
#[cfg(has_rtio_moninj)]
|
#[cfg(has_rtio_moninj)]
|
||||||
unsafe {
|
unsafe {
|
||||||
csr::rtio_moninj::inj_chan_sel_write(channel as _);
|
csr::rtio_moninj::inj_chan_sel_write(channel as _);
|
||||||
|
@ -286,7 +273,7 @@ fn process_aux_packet(dmamgr: &mut DmaManager, analyzer: &mut Analyzer, kernelmg
|
||||||
Ok(())
|
Ok(())
|
||||||
},
|
},
|
||||||
drtioaux::Packet::InjectionStatusRequest { destination: _destination, channel, overrd } => {
|
drtioaux::Packet::InjectionStatusRequest { destination: _destination, channel, overrd } => {
|
||||||
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
|
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
|
||||||
let value;
|
let value;
|
||||||
#[cfg(has_rtio_moninj)]
|
#[cfg(has_rtio_moninj)]
|
||||||
unsafe {
|
unsafe {
|
||||||
|
@ -302,22 +289,22 @@ fn process_aux_packet(dmamgr: &mut DmaManager, analyzer: &mut Analyzer, kernelmg
|
||||||
},
|
},
|
||||||
|
|
||||||
drtioaux::Packet::I2cStartRequest { destination: _destination, busno } => {
|
drtioaux::Packet::I2cStartRequest { destination: _destination, busno } => {
|
||||||
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
|
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
|
||||||
let succeeded = i2c::start(busno).is_ok();
|
let succeeded = i2c::start(busno).is_ok();
|
||||||
drtioaux::send(0, &drtioaux::Packet::I2cBasicReply { succeeded: succeeded })
|
drtioaux::send(0, &drtioaux::Packet::I2cBasicReply { succeeded: succeeded })
|
||||||
}
|
}
|
||||||
drtioaux::Packet::I2cRestartRequest { destination: _destination, busno } => {
|
drtioaux::Packet::I2cRestartRequest { destination: _destination, busno } => {
|
||||||
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
|
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
|
||||||
let succeeded = i2c::restart(busno).is_ok();
|
let succeeded = i2c::restart(busno).is_ok();
|
||||||
drtioaux::send(0, &drtioaux::Packet::I2cBasicReply { succeeded: succeeded })
|
drtioaux::send(0, &drtioaux::Packet::I2cBasicReply { succeeded: succeeded })
|
||||||
}
|
}
|
||||||
drtioaux::Packet::I2cStopRequest { destination: _destination, busno } => {
|
drtioaux::Packet::I2cStopRequest { destination: _destination, busno } => {
|
||||||
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
|
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
|
||||||
let succeeded = i2c::stop(busno).is_ok();
|
let succeeded = i2c::stop(busno).is_ok();
|
||||||
drtioaux::send(0, &drtioaux::Packet::I2cBasicReply { succeeded: succeeded })
|
drtioaux::send(0, &drtioaux::Packet::I2cBasicReply { succeeded: succeeded })
|
||||||
}
|
}
|
||||||
drtioaux::Packet::I2cWriteRequest { destination: _destination, busno, data } => {
|
drtioaux::Packet::I2cWriteRequest { destination: _destination, busno, data } => {
|
||||||
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
|
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
|
||||||
match i2c::write(busno, data) {
|
match i2c::write(busno, data) {
|
||||||
Ok(ack) => drtioaux::send(0,
|
Ok(ack) => drtioaux::send(0,
|
||||||
&drtioaux::Packet::I2cWriteReply { succeeded: true, ack: ack }),
|
&drtioaux::Packet::I2cWriteReply { succeeded: true, ack: ack }),
|
||||||
|
@ -326,7 +313,7 @@ fn process_aux_packet(dmamgr: &mut DmaManager, analyzer: &mut Analyzer, kernelmg
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
drtioaux::Packet::I2cReadRequest { destination: _destination, busno, ack } => {
|
drtioaux::Packet::I2cReadRequest { destination: _destination, busno, ack } => {
|
||||||
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
|
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
|
||||||
match i2c::read(busno, ack) {
|
match i2c::read(busno, ack) {
|
||||||
Ok(data) => drtioaux::send(0,
|
Ok(data) => drtioaux::send(0,
|
||||||
&drtioaux::Packet::I2cReadReply { succeeded: true, data: data }),
|
&drtioaux::Packet::I2cReadReply { succeeded: true, data: data }),
|
||||||
|
@ -335,25 +322,25 @@ fn process_aux_packet(dmamgr: &mut DmaManager, analyzer: &mut Analyzer, kernelmg
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
drtioaux::Packet::I2cSwitchSelectRequest { destination: _destination, busno, address, mask } => {
|
drtioaux::Packet::I2cSwitchSelectRequest { destination: _destination, busno, address, mask } => {
|
||||||
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
|
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
|
||||||
let succeeded = i2c::switch_select(busno, address, mask).is_ok();
|
let succeeded = i2c::switch_select(busno, address, mask).is_ok();
|
||||||
drtioaux::send(0, &drtioaux::Packet::I2cBasicReply { succeeded: succeeded })
|
drtioaux::send(0, &drtioaux::Packet::I2cBasicReply { succeeded: succeeded })
|
||||||
}
|
}
|
||||||
|
|
||||||
drtioaux::Packet::SpiSetConfigRequest { destination: _destination, busno, flags, length, div, cs } => {
|
drtioaux::Packet::SpiSetConfigRequest { destination: _destination, busno, flags, length, div, cs } => {
|
||||||
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
|
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
|
||||||
let succeeded = spi::set_config(busno, flags, length, div, cs).is_ok();
|
let succeeded = spi::set_config(busno, flags, length, div, cs).is_ok();
|
||||||
drtioaux::send(0,
|
drtioaux::send(0,
|
||||||
&drtioaux::Packet::SpiBasicReply { succeeded: succeeded })
|
&drtioaux::Packet::SpiBasicReply { succeeded: succeeded })
|
||||||
},
|
},
|
||||||
drtioaux::Packet::SpiWriteRequest { destination: _destination, busno, data } => {
|
drtioaux::Packet::SpiWriteRequest { destination: _destination, busno, data } => {
|
||||||
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
|
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
|
||||||
let succeeded = spi::write(busno, data).is_ok();
|
let succeeded = spi::write(busno, data).is_ok();
|
||||||
drtioaux::send(0,
|
drtioaux::send(0,
|
||||||
&drtioaux::Packet::SpiBasicReply { succeeded: succeeded })
|
&drtioaux::Packet::SpiBasicReply { succeeded: succeeded })
|
||||||
}
|
}
|
||||||
drtioaux::Packet::SpiReadRequest { destination: _destination, busno } => {
|
drtioaux::Packet::SpiReadRequest { destination: _destination, busno } => {
|
||||||
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
|
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
|
||||||
match spi::read(busno) {
|
match spi::read(busno) {
|
||||||
Ok(data) => drtioaux::send(0,
|
Ok(data) => drtioaux::send(0,
|
||||||
&drtioaux::Packet::SpiReadReply { succeeded: true, data: data }),
|
&drtioaux::Packet::SpiReadReply { succeeded: true, data: data }),
|
||||||
|
@ -363,7 +350,7 @@ fn process_aux_packet(dmamgr: &mut DmaManager, analyzer: &mut Analyzer, kernelmg
|
||||||
}
|
}
|
||||||
|
|
||||||
drtioaux::Packet::AnalyzerHeaderRequest { destination: _destination } => {
|
drtioaux::Packet::AnalyzerHeaderRequest { destination: _destination } => {
|
||||||
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
|
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
|
||||||
let header = analyzer.get_header();
|
let header = analyzer.get_header();
|
||||||
drtioaux::send(0, &drtioaux::Packet::AnalyzerHeader {
|
drtioaux::send(0, &drtioaux::Packet::AnalyzerHeader {
|
||||||
total_byte_count: header.total_byte_count,
|
total_byte_count: header.total_byte_count,
|
||||||
|
@ -373,7 +360,7 @@ fn process_aux_packet(dmamgr: &mut DmaManager, analyzer: &mut Analyzer, kernelmg
|
||||||
}
|
}
|
||||||
|
|
||||||
drtioaux::Packet::AnalyzerDataRequest { destination: _destination } => {
|
drtioaux::Packet::AnalyzerDataRequest { destination: _destination } => {
|
||||||
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
|
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
|
||||||
let mut data_slice: [u8; SAT_PAYLOAD_MAX_SIZE] = [0; SAT_PAYLOAD_MAX_SIZE];
|
let mut data_slice: [u8; SAT_PAYLOAD_MAX_SIZE] = [0; SAT_PAYLOAD_MAX_SIZE];
|
||||||
let meta = analyzer.get_data(&mut data_slice);
|
let meta = analyzer.get_data(&mut data_slice);
|
||||||
drtioaux::send(0, &drtioaux::Packet::AnalyzerData {
|
drtioaux::send(0, &drtioaux::Packet::AnalyzerData {
|
||||||
|
@ -383,56 +370,34 @@ fn process_aux_packet(dmamgr: &mut DmaManager, analyzer: &mut Analyzer, kernelmg
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
drtioaux::Packet::DmaAddTraceRequest { source, destination, id, status, length, trace } => {
|
drtioaux::Packet::DmaAddTraceRequest { destination: _destination, id, status, length, trace } => {
|
||||||
forward!(_routing_table, destination, *rank, _repeaters, &packet);
|
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
|
||||||
*self_destination = destination;
|
let succeeded = dmamgr.add(id, status, &trace, length as usize).is_ok();
|
||||||
let succeeded = dmamgr.add(source, id, status, &trace, length as usize).is_ok();
|
drtioaux::send(0,
|
||||||
router.send(drtioaux::Packet::DmaAddTraceReply {
|
&drtioaux::Packet::DmaAddTraceReply { succeeded: succeeded })
|
||||||
source: *self_destination, destination: source, id: id, succeeded: succeeded
|
|
||||||
}, _routing_table, *rank, *self_destination)
|
|
||||||
}
|
}
|
||||||
drtioaux::Packet::DmaAddTraceReply { source, destination: _destination, id, succeeded } => {
|
drtioaux::Packet::DmaRemoveTraceRequest { destination: _destination, id } => {
|
||||||
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
|
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
|
||||||
dmamgr.ack_upload(kernelmgr, source, id, succeeded, router, *rank, *self_destination, _routing_table);
|
let succeeded = dmamgr.erase(id).is_ok();
|
||||||
Ok(())
|
drtioaux::send(0,
|
||||||
|
&drtioaux::Packet::DmaRemoveTraceReply { succeeded: succeeded })
|
||||||
}
|
}
|
||||||
drtioaux::Packet::DmaRemoveTraceRequest { source, destination: _destination, id } => {
|
drtioaux::Packet::DmaPlaybackRequest { destination: _destination, id, timestamp } => {
|
||||||
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
|
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
|
||||||
let succeeded = dmamgr.erase(source, id).is_ok();
|
|
||||||
router.send(drtioaux::Packet::DmaRemoveTraceReply {
|
|
||||||
destination: source, succeeded: succeeded
|
|
||||||
}, _routing_table, *rank, *self_destination)
|
|
||||||
}
|
|
||||||
drtioaux::Packet::DmaPlaybackRequest { source, destination: _destination, id, timestamp } => {
|
|
||||||
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
|
|
||||||
// no DMA with a running kernel
|
// no DMA with a running kernel
|
||||||
let succeeded = !kernelmgr.is_running() && dmamgr.playback(source, id, timestamp).is_ok();
|
let succeeded = !kernelmgr.is_running() && dmamgr.playback(id, timestamp).is_ok();
|
||||||
router.send(drtioaux::Packet::DmaPlaybackReply {
|
drtioaux::send(0,
|
||||||
destination: source, succeeded: succeeded
|
&drtioaux::Packet::DmaPlaybackReply { succeeded: succeeded })
|
||||||
}, _routing_table, *rank, *self_destination)
|
|
||||||
}
|
|
||||||
drtioaux::Packet::DmaPlaybackReply { destination: _destination, succeeded } => {
|
|
||||||
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
|
|
||||||
if !succeeded {
|
|
||||||
kernelmgr.ddma_nack();
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
drtioaux::Packet::DmaPlaybackStatus { source: _, destination: _destination, id, error, channel, timestamp } => {
|
|
||||||
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
|
|
||||||
dmamgr.remote_finished(kernelmgr, id, error, channel, timestamp);
|
|
||||||
Ok(())
|
|
||||||
}
|
}
|
||||||
|
|
||||||
drtioaux::Packet::SubkernelAddDataRequest { destination, id, status, length, data } => {
|
drtioaux::Packet::SubkernelAddDataRequest { destination: _destination, id, status, length, data } => {
|
||||||
forward!(_routing_table, destination, *rank, _repeaters, &packet);
|
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
|
||||||
*self_destination = destination;
|
|
||||||
let succeeded = kernelmgr.add(id, status, &data, length as usize).is_ok();
|
let succeeded = kernelmgr.add(id, status, &data, length as usize).is_ok();
|
||||||
drtioaux::send(0,
|
drtioaux::send(0,
|
||||||
&drtioaux::Packet::SubkernelAddDataReply { succeeded: succeeded })
|
&drtioaux::Packet::SubkernelAddDataReply { succeeded: succeeded })
|
||||||
}
|
}
|
||||||
drtioaux::Packet::SubkernelLoadRunRequest { source, destination: _destination, id, run } => {
|
drtioaux::Packet::SubkernelLoadRunRequest { destination: _destination, id, run } => {
|
||||||
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
|
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
|
||||||
let mut succeeded = kernelmgr.load(id).is_ok();
|
let mut succeeded = kernelmgr.load(id).is_ok();
|
||||||
// allow preloading a kernel with delayed run
|
// allow preloading a kernel with delayed run
|
||||||
if run {
|
if run {
|
||||||
|
@ -440,27 +405,14 @@ fn process_aux_packet(dmamgr: &mut DmaManager, analyzer: &mut Analyzer, kernelmg
|
||||||
// cannot run kernel while DDMA is running
|
// cannot run kernel while DDMA is running
|
||||||
succeeded = false;
|
succeeded = false;
|
||||||
} else {
|
} else {
|
||||||
succeeded |= kernelmgr.run(source, id).is_ok();
|
succeeded |= kernelmgr.run(id).is_ok();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
router.send(drtioaux::Packet::SubkernelLoadRunReply {
|
drtioaux::send(0,
|
||||||
destination: source, succeeded: succeeded
|
&drtioaux::Packet::SubkernelLoadRunReply { succeeded: succeeded })
|
||||||
},
|
|
||||||
_routing_table, *rank, *self_destination)
|
|
||||||
}
|
|
||||||
drtioaux::Packet::SubkernelLoadRunReply { destination: _destination, succeeded } => {
|
|
||||||
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
|
|
||||||
// received if local subkernel started another, remote subkernel
|
|
||||||
kernelmgr.subkernel_load_run_reply(succeeded, *self_destination);
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
drtioaux::Packet::SubkernelFinished { destination: _destination, id, with_exception, exception_src } => {
|
|
||||||
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
|
|
||||||
kernelmgr.remote_subkernel_finished(id, with_exception, exception_src);
|
|
||||||
Ok(())
|
|
||||||
}
|
}
|
||||||
drtioaux::Packet::SubkernelExceptionRequest { destination: _destination } => {
|
drtioaux::Packet::SubkernelExceptionRequest { destination: _destination } => {
|
||||||
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
|
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
|
||||||
let mut data_slice: [u8; SAT_PAYLOAD_MAX_SIZE] = [0; SAT_PAYLOAD_MAX_SIZE];
|
let mut data_slice: [u8; SAT_PAYLOAD_MAX_SIZE] = [0; SAT_PAYLOAD_MAX_SIZE];
|
||||||
let meta = kernelmgr.exception_get_slice(&mut data_slice);
|
let meta = kernelmgr.exception_get_slice(&mut data_slice);
|
||||||
drtioaux::send(0, &drtioaux::Packet::SubkernelException {
|
drtioaux::send(0, &drtioaux::Packet::SubkernelException {
|
||||||
|
@ -469,23 +421,22 @@ fn process_aux_packet(dmamgr: &mut DmaManager, analyzer: &mut Analyzer, kernelmg
|
||||||
data: data_slice,
|
data: data_slice,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
drtioaux::Packet::SubkernelMessage { source, destination: _destination, id, status, length, data } => {
|
drtioaux::Packet::SubkernelMessage { destination, id: _id, status, length, data } => {
|
||||||
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
|
forward!(_routing_table, destination, *_rank, _repeaters, &packet);
|
||||||
kernelmgr.message_handle_incoming(status, length as usize, id, &data);
|
kernelmgr.message_handle_incoming(status, length as usize, &data);
|
||||||
router.send(drtioaux::Packet::SubkernelMessageAck {
|
drtioaux::send(0, &drtioaux::Packet::SubkernelMessageAck {
|
||||||
destination: source
|
destination: destination
|
||||||
}, _routing_table, *rank, *self_destination)
|
})
|
||||||
}
|
}
|
||||||
drtioaux::Packet::SubkernelMessageAck { destination: _destination } => {
|
drtioaux::Packet::SubkernelMessageAck { destination: _destination } => {
|
||||||
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
|
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
|
||||||
if kernelmgr.message_ack_slice() {
|
if kernelmgr.message_ack_slice() {
|
||||||
let mut data_slice: [u8; MASTER_PAYLOAD_MAX_SIZE] = [0; MASTER_PAYLOAD_MAX_SIZE];
|
let mut data_slice: [u8; MASTER_PAYLOAD_MAX_SIZE] = [0; MASTER_PAYLOAD_MAX_SIZE];
|
||||||
if let Some(meta) = kernelmgr.message_get_slice(&mut data_slice) {
|
if let Some(meta) = kernelmgr.message_get_slice(&mut data_slice) {
|
||||||
// route and not send immediately as ACKs are not a beginning of a transaction
|
drtioaux::send(0, &drtioaux::Packet::SubkernelMessage {
|
||||||
router.route(drtioaux::Packet::SubkernelMessage {
|
destination: *_rank, id: kernelmgr.get_current_id().unwrap(),
|
||||||
source: *self_destination, destination: meta.destination, id: kernelmgr.get_current_id().unwrap(),
|
|
||||||
status: meta.status, length: meta.len as u16, data: data_slice
|
status: meta.status, length: meta.len as u16, data: data_slice
|
||||||
}, _routing_table, *rank, *self_destination);
|
})?
|
||||||
} else {
|
} else {
|
||||||
error!("Error receiving message slice");
|
error!("Error receiving message slice");
|
||||||
}
|
}
|
||||||
|
@ -502,19 +453,18 @@ fn process_aux_packet(dmamgr: &mut DmaManager, analyzer: &mut Analyzer, kernelmg
|
||||||
|
|
||||||
fn process_aux_packets(dma_manager: &mut DmaManager, analyzer: &mut Analyzer,
|
fn process_aux_packets(dma_manager: &mut DmaManager, analyzer: &mut Analyzer,
|
||||||
kernelmgr: &mut KernelManager, repeaters: &mut [repeater::Repeater],
|
kernelmgr: &mut KernelManager, repeaters: &mut [repeater::Repeater],
|
||||||
routing_table: &mut drtio_routing::RoutingTable, rank: &mut u8, router: &mut routing::Router,
|
routing_table: &mut drtio_routing::RoutingTable, rank: &mut u8) {
|
||||||
destination: &mut u8) {
|
|
||||||
let result =
|
let result =
|
||||||
drtioaux::recv(0).and_then(|packet| {
|
drtioaux::recv(0).and_then(|packet| {
|
||||||
if let Some(packet) = packet.or_else(|| router.get_local_packet()) {
|
if let Some(packet) = packet {
|
||||||
process_aux_packet(dma_manager, analyzer, kernelmgr,
|
process_aux_packet(dma_manager, analyzer, kernelmgr, repeaters, routing_table, rank, packet)
|
||||||
repeaters, routing_table, rank, router, destination, packet)
|
|
||||||
} else {
|
} else {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
if let Err(e) = result {
|
match result {
|
||||||
warn!("aux packet error ({})", e);
|
Ok(()) => (),
|
||||||
|
Err(e) => warn!("aux packet error ({})", e)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -720,7 +670,6 @@ pub extern fn main() -> i32 {
|
||||||
}
|
}
|
||||||
let mut routing_table = drtio_routing::RoutingTable::default_empty();
|
let mut routing_table = drtio_routing::RoutingTable::default_empty();
|
||||||
let mut rank = 1;
|
let mut rank = 1;
|
||||||
let mut destination = 1;
|
|
||||||
|
|
||||||
let mut hardware_tick_ts = 0;
|
let mut hardware_tick_ts = 0;
|
||||||
|
|
||||||
|
@ -728,12 +677,10 @@ pub extern fn main() -> i32 {
|
||||||
ad9117::init().expect("AD9117 initialization failed");
|
ad9117::init().expect("AD9117 initialization failed");
|
||||||
|
|
||||||
loop {
|
loop {
|
||||||
let mut router = routing::Router::new();
|
|
||||||
|
|
||||||
while !drtiosat_link_rx_up() {
|
while !drtiosat_link_rx_up() {
|
||||||
drtiosat_process_errors();
|
drtiosat_process_errors();
|
||||||
for rep in repeaters.iter_mut() {
|
for rep in repeaters.iter_mut() {
|
||||||
rep.service(&routing_table, rank, destination, &mut router);
|
rep.service(&routing_table, rank);
|
||||||
}
|
}
|
||||||
#[cfg(all(soc_platform = "kasli", hw_rev = "v2.0"))]
|
#[cfg(all(soc_platform = "kasli", hw_rev = "v2.0"))]
|
||||||
{
|
{
|
||||||
|
@ -767,10 +714,10 @@ pub extern fn main() -> i32 {
|
||||||
while drtiosat_link_rx_up() {
|
while drtiosat_link_rx_up() {
|
||||||
drtiosat_process_errors();
|
drtiosat_process_errors();
|
||||||
process_aux_packets(&mut dma_manager, &mut analyzer,
|
process_aux_packets(&mut dma_manager, &mut analyzer,
|
||||||
&mut kernelmgr, &mut repeaters, &mut routing_table,
|
&mut kernelmgr, &mut repeaters,
|
||||||
&mut rank, &mut router, &mut destination);
|
&mut routing_table, &mut rank);
|
||||||
for rep in repeaters.iter_mut() {
|
for rep in repeaters.iter_mut() {
|
||||||
rep.service(&routing_table, rank, destination, &mut router);
|
rep.service(&routing_table, rank);
|
||||||
}
|
}
|
||||||
#[cfg(all(soc_platform = "kasli", hw_rev = "v2.0"))]
|
#[cfg(all(soc_platform = "kasli", hw_rev = "v2.0"))]
|
||||||
{
|
{
|
||||||
|
@ -791,26 +738,7 @@ pub extern fn main() -> i32 {
|
||||||
error!("aux packet error: {}", e);
|
error!("aux packet error: {}", e);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if let Some(status) = dma_manager.get_status() {
|
kernelmgr.process_kern_requests(rank);
|
||||||
info!("playback done, error: {}, channel: {}, timestamp: {}", status.error, status.channel, status.timestamp);
|
|
||||||
router.route(drtioaux::Packet::DmaPlaybackStatus {
|
|
||||||
source: destination, destination: status.source, id: status.id,
|
|
||||||
error: status.error, channel: status.channel, timestamp: status.timestamp
|
|
||||||
}, &routing_table, rank, destination);
|
|
||||||
}
|
|
||||||
|
|
||||||
kernelmgr.process_kern_requests(&mut router, &routing_table, rank, destination, &mut dma_manager);
|
|
||||||
|
|
||||||
#[cfg(has_drtio_routing)]
|
|
||||||
if let Some((repno, packet)) = router.get_downstream_packet() {
|
|
||||||
if let Err(e) = repeaters[repno].aux_send(&packet) {
|
|
||||||
warn!("[REP#{}] Error when sending packet to satellite ({:?})", repno, e)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if router.any_upstream_waiting() {
|
|
||||||
drtiosat_async_ready();
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
drtiosat_reset_phy(true);
|
drtiosat_reset_phy(true);
|
||||||
|
|
|
@ -1,7 +1,6 @@
|
||||||
use board_artiq::{drtioaux, drtio_routing};
|
use board_artiq::{drtioaux, drtio_routing};
|
||||||
#[cfg(has_drtio_routing)]
|
#[cfg(has_drtio_routing)]
|
||||||
use board_misoc::{csr, clock};
|
use board_misoc::{csr, clock};
|
||||||
use routing::Router;
|
|
||||||
|
|
||||||
#[cfg(has_drtio_routing)]
|
#[cfg(has_drtio_routing)]
|
||||||
fn rep_link_rx_up(repno: u8) -> bool {
|
fn rep_link_rx_up(repno: u8) -> bool {
|
||||||
|
@ -49,7 +48,7 @@ impl Repeater {
|
||||||
self.state == RepeaterState::Up
|
self.state == RepeaterState::Up
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn service(&mut self, routing_table: &drtio_routing::RoutingTable, rank: u8, destination: u8, router: &mut Router) {
|
pub fn service(&mut self, routing_table: &drtio_routing::RoutingTable, rank: u8) {
|
||||||
self.process_local_errors();
|
self.process_local_errors();
|
||||||
|
|
||||||
match self.state {
|
match self.state {
|
||||||
|
@ -112,11 +111,6 @@ impl Repeater {
|
||||||
info!("[REP#{}] link is down", self.repno);
|
info!("[REP#{}] link is down", self.repno);
|
||||||
self.state = RepeaterState::Down;
|
self.state = RepeaterState::Down;
|
||||||
}
|
}
|
||||||
if self.async_messages_ready() {
|
|
||||||
if let Err(e) = self.handle_async(routing_table, rank, destination, router) {
|
|
||||||
warn!("[REP#{}] Error handling async messages ({})", self.repno, e);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
RepeaterState::Failed => {
|
RepeaterState::Failed => {
|
||||||
if !rep_link_rx_up(self.repno) {
|
if !rep_link_rx_up(self.repno) {
|
||||||
|
@ -185,40 +179,14 @@ impl Repeater {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn async_messages_ready(&self) -> bool {
|
|
||||||
let async_rdy;
|
|
||||||
unsafe {
|
|
||||||
async_rdy = (csr::DRTIOREP[self.repno as usize].async_messages_ready_read)();
|
|
||||||
(csr::DRTIOREP[self.repno as usize].async_messages_ready_write)(0);
|
|
||||||
}
|
|
||||||
async_rdy == 1
|
|
||||||
}
|
|
||||||
|
|
||||||
fn handle_async(&self, routing_table: &drtio_routing::RoutingTable, rank: u8, self_destination: u8, router: &mut Router
|
|
||||||
) -> Result<(), drtioaux::Error<!>> {
|
|
||||||
loop {
|
|
||||||
drtioaux::send(self.auxno, &drtioaux::Packet::RoutingRetrievePackets).unwrap();
|
|
||||||
let reply = self.recv_aux_timeout(200)?;
|
|
||||||
match reply {
|
|
||||||
drtioaux::Packet::RoutingNoPackets => break,
|
|
||||||
packet => router.route(packet, routing_table, rank, self_destination)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn aux_forward(&self, request: &drtioaux::Packet) -> Result<(), drtioaux::Error<!>> {
|
pub fn aux_forward(&self, request: &drtioaux::Packet) -> Result<(), drtioaux::Error<!>> {
|
||||||
self.aux_send(request)?;
|
|
||||||
let reply = self.recv_aux_timeout(200)?;
|
|
||||||
drtioaux::send(0, &reply).unwrap();
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn aux_send(&self, request: &drtioaux::Packet) -> Result<(), drtioaux::Error<!>> {
|
|
||||||
if self.state != RepeaterState::Up {
|
if self.state != RepeaterState::Up {
|
||||||
return Err(drtioaux::Error::LinkDown);
|
return Err(drtioaux::Error::LinkDown);
|
||||||
}
|
}
|
||||||
drtioaux::send(self.auxno, request)
|
drtioaux::send(self.auxno, request).unwrap();
|
||||||
|
let reply = self.recv_aux_timeout(200)?;
|
||||||
|
drtioaux::send(0, &reply).unwrap();
|
||||||
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn sync_tsc(&self) -> Result<(), drtioaux::Error<!>> {
|
pub fn sync_tsc(&self) -> Result<(), drtioaux::Error<!>> {
|
||||||
|
@ -231,6 +199,7 @@ impl Repeater {
|
||||||
(csr::DRTIOREP[repno].set_time_write)(1);
|
(csr::DRTIOREP[repno].set_time_write)(1);
|
||||||
while (csr::DRTIOREP[repno].set_time_read)() == 1 {}
|
while (csr::DRTIOREP[repno].set_time_read)() == 1 {}
|
||||||
}
|
}
|
||||||
|
|
||||||
// TSCAck is the only aux packet that is sent spontaneously
|
// TSCAck is the only aux packet that is sent spontaneously
|
||||||
// by the satellite, in response to a TSC set on the RT link.
|
// by the satellite, in response to a TSC set on the RT link.
|
||||||
let reply = self.recv_aux_timeout(10000)?;
|
let reply = self.recv_aux_timeout(10000)?;
|
||||||
|
@ -306,7 +275,7 @@ pub struct Repeater {
|
||||||
impl Repeater {
|
impl Repeater {
|
||||||
pub fn new(_repno: u8) -> Repeater { Repeater::default() }
|
pub fn new(_repno: u8) -> Repeater { Repeater::default() }
|
||||||
|
|
||||||
pub fn service(&self, _routing_table: &drtio_routing::RoutingTable, _rank: u8, _destination: u8, _router: &mut Router) { }
|
pub fn service(&self, _routing_table: &drtio_routing::RoutingTable, _rank: u8) { }
|
||||||
|
|
||||||
pub fn sync_tsc(&self) -> Result<(), drtioaux::Error<!>> { Ok(()) }
|
pub fn sync_tsc(&self) -> Result<(), drtioaux::Error<!>> { Ok(()) }
|
||||||
|
|
||||||
|
|
|
@@ -1,184 +0,0 @@
-use alloc::{vec::Vec, collections::vec_deque::VecDeque};
-use board_artiq::{drtioaux, drtio_routing};
-#[cfg(has_drtio_routing)]
-use board_misoc::csr;
-use core::cmp::min;
-use proto_artiq::drtioaux_proto::PayloadStatus;
-use SAT_PAYLOAD_MAX_SIZE;
-use MASTER_PAYLOAD_MAX_SIZE;
-
-/* represents data that has to be sent with the aux protocol */
-#[derive(Debug)]
-pub struct Sliceable {
-    it: usize,
-    data: Vec<u8>,
-    destination: u8
-}
-
-pub struct SliceMeta {
-    pub destination: u8,
-    pub len: u16,
-    pub status: PayloadStatus
-}
-
-macro_rules! get_slice_fn {
-    ( $name:tt, $size:expr ) => {
-        pub fn $name(&mut self, data_slice: &mut [u8; $size]) -> SliceMeta {
-            let first = self.it == 0;
-            let len = min($size, self.data.len() - self.it);
-            let last = self.it + len == self.data.len();
-            let status = PayloadStatus::from_status(first, last);
-            data_slice[..len].clone_from_slice(&self.data[self.it..self.it+len]);
-            self.it += len;
-
-            SliceMeta {
-                destination: self.destination,
-                len: len as u16,
-                status: status
-            }
-        }
-    };
-}
-
-impl Sliceable {
-    pub fn new(destination: u8, data: Vec<u8>) -> Sliceable {
-        Sliceable {
-            it: 0,
-            data: data,
-            destination: destination
-        }
-    }
-
-    pub fn at_end(&self) -> bool {
-        self.it == self.data.len()
-    }
-
-    pub fn extend(&mut self, data: &[u8]) {
-        self.data.extend(data);
-    }
-
-    get_slice_fn!(get_slice_sat, SAT_PAYLOAD_MAX_SIZE);
-    get_slice_fn!(get_slice_master, MASTER_PAYLOAD_MAX_SIZE);
-}
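The Sliceable type above chunks a payload into fixed-size aux slices and tags each slice with a first/last status so the receiver can reassemble it. A rough Python rendering of the same slicing rule, where max_size stands in for SAT_PAYLOAD_MAX_SIZE or MASTER_PAYLOAD_MAX_SIZE:

# Hypothetical sketch of the Sliceable slicing scheme: cut the payload into
# fixed-size chunks, each carrying (first, last) flags in place of PayloadStatus.
def slice_payload(data, max_size):
    it = 0
    while True:
        first = (it == 0)
        chunk = data[it:it + max_size]
        it += len(chunk)
        last = (it == len(data))
        yield chunk, (first, last)
        if last:
            break

# Example: list(slice_payload(b"x" * 1000, 512)) yields two chunks,
# (512 bytes, (True, False)) and (488 bytes, (False, True)).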
-// Packets from downstream (further satellites) are received and routed appropriately.
-// they're passed as soon as possible downstream (within the subtree), or sent upstream,
-// which is notified about pending packets.
-// for rank 1 (connected to master) satellites, these packets are passed as an answer to DestinationStatusRequest;
-// for higher ranks, after getting a notification, it will transact with downstream to get the pending packets.
-
-// forward! macro is not deprecated, as routable packets are only these that can originate
-// from both master and satellite, e.g. DDMA and Subkernel.
-
-pub struct Router {
-    upstream_queue: VecDeque<drtioaux::Packet>,
-    local_queue: VecDeque<drtioaux::Packet>,
-    #[cfg(has_drtio_routing)]
-    downstream_queue: VecDeque<(usize, drtioaux::Packet)>,
-    upstream_notified: bool,
-}
-
-impl Router {
-    pub fn new() -> Router {
-        Router {
-            upstream_queue: VecDeque::new(),
-            local_queue: VecDeque::new(),
-            #[cfg(has_drtio_routing)]
-            downstream_queue: VecDeque::new(),
-            upstream_notified: false,
-        }
-    }
-
-    // called by local sources (DDMA, kernel) and by repeaters on receiving async data
-    // messages are always buffered for both upstream and downstream
-    pub fn route(&mut self, packet: drtioaux::Packet,
-        _routing_table: &drtio_routing::RoutingTable, _rank: u8,
-        self_destination: u8
-    ) {
-        let destination = packet.routable_destination();
-        #[cfg(has_drtio_routing)]
-        {
-            if let Some(destination) = destination {
-                let hop = _routing_table.0[destination as usize][_rank as usize] as usize;
-                if destination == self_destination {
-                    self.local_queue.push_back(packet);
-                } else if hop > 0 && hop < csr::DRTIOREP.len() {
-                    let repno = (hop - 1) as usize;
-                    self.downstream_queue.push_back((repno, packet));
-                } else {
-                    self.upstream_queue.push_back(packet);
-                }
-            } else {
-                error!("Received an unroutable packet: {:?}", packet);
-            }
-        }
-        #[cfg(not(has_drtio_routing))]
-        {
-            if destination == Some(self_destination) {
-                self.local_queue.push_back(packet);
-            } else {
-                self.upstream_queue.push_back(packet);
-            }
-        }
-    }
-
-    // Sends a packet to a required destination, routing if it's necessary
-    pub fn send(&mut self, packet: drtioaux::Packet,
-        _routing_table: &drtio_routing::RoutingTable,
-        _rank: u8, _destination: u8
-    ) -> Result<(), drtioaux::Error<!>> {
-        #[cfg(has_drtio_routing)]
-        {
-            let destination = packet.routable_destination();
-            if let Some(destination) = destination {
-                let hop = _routing_table.0[destination as usize][_rank as usize] as usize;
-                if destination == 0 {
-                    // response is needed immediately if master required it
-                    drtioaux::send(0, &packet)?;
-                } else if !(hop > 0 && hop < csr::DRTIOREP.len()) {
-                    // higher rank can wait
-                    self.upstream_queue.push_back(packet);
-                } else {
-                    let repno = (hop - 1) as usize;
-                    // transaction will occur at closest possible opportunity
-                    self.downstream_queue.push_back((repno, packet));
-                }
-                Ok(())
-            } else {
-                // packet not supported in routing, fallback - sent directly
-                drtioaux::send(0, &packet)
-            }
-        }
-        #[cfg(not(has_drtio_routing))]
-        {
-            drtioaux::send(0, &packet)
-        }
-    }
-
-    pub fn any_upstream_waiting(&mut self) -> bool {
-        let empty = self.upstream_queue.is_empty();
-        if !empty && !self.upstream_notified {
-            self.upstream_notified = true; // so upstream will not get spammed with notifications
-            true
-        } else {
-            false
-        }
-    }
-
-    pub fn get_upstream_packet(&mut self) -> Option<drtioaux::Packet> {
-        let packet = self.upstream_queue.pop_front();
-        if packet.is_none() {
-            self.upstream_notified = false;
-        }
-        packet
-    }
-
-    #[cfg(has_drtio_routing)]
-    pub fn get_downstream_packet(&mut self) -> Option<(usize, drtioaux::Packet)> {
-        self.downstream_queue.pop_front()
-    }
-
-    pub fn get_local_packet(&mut self) -> Option<drtioaux::Packet> {
-        self.local_queue.pop_front()
-    }
-}
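The Router above only buffers: route() looks up the hop for the packet's destination and pushes it onto the local, downstream or upstream queue. How a satellite loop might drain those queues is sketched below in illustrative Python; the method names mirror the Rust ones, everything else (repeaters, handle_locally, notify_upstream) is hypothetical.

# Illustrative sketch (assumed interfaces): drain the three Router queues.
# Local packets are handled in place, downstream packets are forwarded to the
# matching repeater, upstream packets wait until the master asks for them.
def service_router(router, repeaters, handle_locally, notify_upstream):
    while (packet := router.get_local_packet()) is not None:
        handle_locally(packet)
    while (entry := router.get_downstream_packet()) is not None:
        repno, packet = entry
        repeaters[repno].aux_send(packet)
    if router.any_upstream_waiting():
        notify_upstream()   # tell the upstream node that packets are pending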
@@ -9,7 +9,7 @@ from sipyco.asyncio_tools import AsyncioServer, SignalHandler, atexit_register_c
 from sipyco.pc_rpc import Server
 from sipyco import common_args

-from artiq.coredevice.comm_analyzer import get_analyzer_dump, ANALYZER_MAGIC
+from artiq.coredevice.comm_analyzer import get_analyzer_dump


 logger = logging.getLogger(__name__)
@@ -24,7 +24,6 @@ class ProxyServer(AsyncioServer):

     async def _handle_connection_cr(self, reader, writer):
         try:
-            writer.write(ANALYZER_MAGIC)
             queue = asyncio.Queue(self._queue_limit)
             self._recipients.add(queue)
             try:
@@ -61,7 +60,9 @@ class ProxyControl:
             self.distribute_cb(dump)
         except:
             logger.warning("Trigger failed:", exc_info=True)
-            raise
+            return False
+        else:
+            return True


 def get_argparser():
@@ -22,7 +22,6 @@ from sipyco.pc_rpc import Client
 from sipyco.sync_struct import Subscriber
 from sipyco.broadcast import Receiver
 from sipyco import common_args, pyon
-from sipyco.asyncio_tools import SignalHandler

 from artiq.tools import (scale_from_metadata, short_format, parse_arguments,
                          parse_devarg_override)
@@ -113,25 +112,11 @@ def get_argparser():
         "del-dataset", help="delete a dataset")
     parser_del_dataset.add_argument("name", help="name of the dataset")

-    parser_supply_interactive = subparsers.add_parser(
-        "supply-interactive", help="supply interactive arguments")
-    parser_supply_interactive.add_argument(
-        "rid", metavar="RID", type=int, help="RID of target experiment")
-    parser_supply_interactive.add_argument(
-        "arguments", metavar="ARGUMENTS", nargs="*",
-        help="interactive arguments")
-
-    parser_cancel_interactive = subparsers.add_parser(
-        "cancel-interactive", help="cancel interactive arguments")
-    parser_cancel_interactive.add_argument(
-        "rid", metavar="RID", type=int, help="RID of target experiment")
-
     parser_show = subparsers.add_parser(
         "show", help="show schedule, log, devices or datasets")
     parser_show.add_argument(
         "what", metavar="WHAT",
-        choices=["schedule", "log", "ccb", "devices", "datasets",
-                 "interactive-args"],
+        choices=["schedule", "log", "ccb", "devices", "datasets"],
         help="select object to show: %(choices)s")

     subparsers.add_parser(
@@ -150,7 +135,8 @@ def get_argparser():
         "ls", help="list a directory on the master")
     parser_ls.add_argument("directory", default="", nargs="?")

-    subparsers.add_parser("terminate", help="terminate the ARTIQ master")
+    subparsers.add_parser(
+        "terminate", help="terminate the ARTIQ master")

     common_args.verbosity_args(parser)
     return parser
@@ -222,15 +208,6 @@ def _action_scan_devices(remote, args):
     remote.scan()


-def _action_supply_interactive(remote, args):
-    arguments = parse_arguments(args.arguments)
-    remote.supply(args.rid, arguments)
-
-
-def _action_cancel_interactive(remote, args):
-    remote.cancel(args.rid)
-
-
 def _action_scan_repository(remote, args):
     if getattr(args, "async"):
         remote.scan_repository_async(args.revision)
@@ -297,34 +274,17 @@ def _show_datasets(datasets):
     print(table)


-def _show_interactive_args(interactive_args):
-    clear_screen()
-    table = PrettyTable(["RID", "Title", "Key", "Type", "Group", "Tooltip"])
-    for rid, input_request in sorted(interactive_args.items(), key=itemgetter(0)):
-        title = input_request["title"]
-        for key, procdesc, group, tooltip in input_request["arglist_desc"]:
-            table.add_row([rid, title, key, procdesc["ty"], group, tooltip])
-    print(table)
-
-
 def _run_subscriber(host, port, subscriber):
     loop = asyncio.new_event_loop()
     asyncio.set_event_loop(loop)
     try:
-        signal_handler = SignalHandler()
-        signal_handler.setup()
-        try:
-            loop.run_until_complete(subscriber.connect(host, port))
-            try:
-                _, pending = loop.run_until_complete(asyncio.wait(
-                    [loop.create_task(signal_handler.wait_terminate()), subscriber.receive_task],
-                    return_when=asyncio.FIRST_COMPLETED))
-                for task in pending:
-                    task.cancel()
-            finally:
-                loop.run_until_complete(subscriber.close())
-        finally:
-            signal_handler.teardown()
+        loop.run_until_complete(subscriber.connect(host, port))
+        try:
+            loop.run_until_complete(asyncio.wait_for(subscriber.receive_task,
+                                                     None))
+            print("Connection to master lost")
+        finally:
+            loop.run_until_complete(subscriber.close())
     finally:
         loop.close()
@@ -378,22 +338,18 @@ def main():
             _show_dict(args, "devices", _show_devices)
         elif args.what == "datasets":
             _show_dict(args, "datasets", _show_datasets)
-        elif args.what == "interactive-args":
-            _show_dict(args, "interactive_args", _show_interactive_args)
         else:
             raise ValueError
     else:
         port = 3251 if args.port is None else args.port
         target_name = {
-            "submit": "schedule",
-            "delete": "schedule",
-            "set_dataset": "dataset_db",
-            "del_dataset": "dataset_db",
-            "scan_devices": "device_db",
-            "supply_interactive": "interactive_arg_db",
-            "cancel_interactive": "interactive_arg_db",
-            "scan_repository": "experiment_db",
-            "ls": "experiment_db",
+            "submit": "master_schedule",
+            "delete": "master_schedule",
+            "set_dataset": "master_dataset_db",
+            "del_dataset": "master_dataset_db",
+            "scan_devices": "master_device_db",
+            "scan_repository": "master_experiment_db",
+            "ls": "master_experiment_db",
             "terminate": "master_management",
         }[action]
         remote = Client(args.server, port, target_name)
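The target_name mapping above is what the command-line client hands to sipyco when opening the RPC connection to the master's control port; on one side the scheduler is published as "master_schedule", on the other as plain "schedule". A minimal sketch of such a connection (host, port and RID values are examples):

# Sketch of the RPC connection made by the client: pick the control target by name.
from sipyco.pc_rpc import Client

remote = Client("::1", 3251, "master_schedule")   # host and port are placeholders
try:
    remote.request_termination(0)                 # RID 0 is a placeholder argument
finally:
    remote.close_rpc()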
@@ -67,21 +67,12 @@ def main():
             core.compile(exp.run, [exp_inst], {},
                          attribute_writeback=False, print_as_rpc=False)

-            subkernels = object_map.subkernels()
-            compiled_subkernels = {}
-            while True:
-                new_subkernels = {}
-                for sid, subkernel_fn in subkernels.items():
-                    if sid in compiled_subkernels.keys():
-                        continue
-                    destination, subkernel_library, embedding_map = core.compile_subkernel(
-                        sid, subkernel_fn, object_map,
-                        [exp_inst], subkernel_arg_types, subkernels)
-                    compiled_subkernels[sid] = (destination, subkernel_library)
-                    new_subkernels.update(embedding_map.subkernels())
-                if new_subkernels == subkernels:
-                    break
-                subkernels.update(new_subkernels)
+            subkernels = {}
+            for sid, subkernel_fn in object_map.subkernels().items():
+                destination, subkernel_library = core.compile_subkernel(
+                    sid, subkernel_fn, object_map,
+                    [exp_inst], subkernel_arg_types)
+                subkernels[sid] = (destination, subkernel_library)
     except CompileError as error:
         return
     finally:
@@ -116,7 +107,7 @@ def main():
         tar.addfile(main_kernel_info, fileobj=main_kernel_fileobj)

         # subkernels as "<sid> <destination>.elf"
-        for sid, (destination, subkernel_library) in compiled_subkernels.items():
+        for sid, (destination, subkernel_library) in subkernels.items():
             subkernel_fileobj = io.BytesIO(subkernel_library)
             subkernel_info = tarfile.TarInfo(name="{} {}.elf".format(sid, destination))
             subkernel_info.size = len(subkernel_library)
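The removed branch above compiles subkernels to a fixed point: compiling one subkernel can reveal references to further subkernels, so it keeps iterating until nothing new turns up. A standalone sketch of that pattern, where compile_one is a stand-in for core.compile_subkernel:

# Sketch of fixed-point subkernel compilation: keep compiling until a pass
# discovers no new subkernel IDs. compile_one(sid, fn) -> (artifact, referenced).
def compile_all_subkernels(initial, compile_one):
    todo = dict(initial)            # sid -> function, grows as new ones are found
    compiled = {}
    while True:
        new = {}
        for sid, fn in list(todo.items()):
            if sid in compiled:
                continue
            artifact, referenced = compile_one(sid, fn)
            compiled[sid] = artifact
            new.update(referenced)
        before = len(todo)
        todo.update(new)
        if len(todo) == before:     # fixed point reached: nothing new was discovered
            break
    return compiled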
@@ -6,6 +6,7 @@ import atexit
 import importlib
 import os
 import logging
+import sys

 from PyQt5 import QtCore, QtGui, QtWidgets
 from qasync import QEventLoop
@@ -14,15 +15,13 @@ from sipyco.pc_rpc import AsyncioClient, Client
 from sipyco.broadcast import Receiver
 from sipyco import common_args
 from sipyco.asyncio_tools import atexit_register_coroutine
-from sipyco.sync_struct import Subscriber

 from artiq import __artiq_dir__ as artiq_dir, __version__ as artiq_version
 from artiq.tools import get_user_config_dir
 from artiq.gui.models import ModelSubscriber
 from artiq.gui import state, log
 from artiq.dashboard import (experiments, shortcuts, explorer,
-                             moninj, datasets, schedule, applets_ccb,
-                             waveform, interactive_args)
+                             moninj, datasets, schedule, applets_ccb)


 def get_argparser():
@@ -48,15 +47,6 @@ def get_argparser():
     parser.add_argument(
         "-p", "--load-plugin", dest="plugin_modules", action="append",
         help="Python module to load on startup")
-    parser.add_argument(
-        "--analyzer-proxy-timeout", default=5, type=float,
-        help="connection timeout to core analyzer proxy")
-    parser.add_argument(
-        "--analyzer-proxy-timer", default=5, type=float,
-        help="retry timer to core analyzer proxy")
-    parser.add_argument(
-        "--analyzer-proxy-timer-backoff", default=1.1, type=float,
-        help="retry timer backoff multiplier to core analyzer proxy")
     common_args.verbosity_args(parser)
     return parser

@@ -70,7 +60,7 @@ class MainWindow(QtWidgets.QMainWindow):
         self.setWindowTitle("ARTIQ Dashboard - {}".format(server))

         qfm = QtGui.QFontMetrics(self.font())
-        self.resize(140 * qfm.averageCharWidth(), 38 * qfm.lineSpacing())
+        self.resize(140*qfm.averageCharWidth(), 38*qfm.lineSpacing())

         self.exit_request = asyncio.Event()
@@ -110,8 +100,8 @@ class MdiArea(QtWidgets.QMdiArea):
     def paintEvent(self, event):
         QtWidgets.QMdiArea.paintEvent(self, event)
         painter = QtGui.QPainter(self.viewport())
-        x = (self.width() - self.pixmap.width()) // 2
-        y = (self.height() - self.pixmap.height()) // 2
+        x = (self.width() - self.pixmap.width())//2
+        y = (self.height() - self.pixmap.height())//2
         painter.setOpacity(0.5)
         painter.drawPixmap(x, y, self.pixmap)
@@ -128,9 +118,9 @@ def main():

     if args.db_file is None:
         args.db_file = os.path.join(get_user_config_dir(),
                                     "artiq_dashboard_{server}_{port}.pyon".format(
-                                        server=args.server.replace(":", "."),
+                                        server=args.server.replace(":","."),
                                         port=args.port_notify))

     app = QtWidgets.QApplication(["ARTIQ Dashboard"])
     loop = QEventLoop(app)
@@ -140,10 +130,10 @@ def main():

     # create connections to master
     rpc_clients = dict()
-    for target in "schedule", "experiment_db", "dataset_db", "device_db", "interactive_arg_db":
+    for target in "schedule", "experiment_db", "dataset_db", "device_db":
         client = AsyncioClient()
         loop.run_until_complete(client.connect_rpc(
-            args.server, args.port_control, target))
+            args.server, args.port_control, "master_" + target))
         atexit.register(client.close_rpc)
         rpc_clients[target] = client
@@ -154,7 +144,6 @@ def main():
     master_management.close_rpc()

     disconnect_reported = False

     def report_disconnect():
         nonlocal disconnect_reported
         if not disconnect_reported:
@@ -166,9 +155,9 @@ def main():
     for notifier_name, modelf in (("explist", explorer.Model),
                                   ("explist_status", explorer.StatusUpdater),
                                   ("datasets", datasets.Model),
-                                  ("schedule", schedule.Model),
-                                  ("interactive_args", interactive_args.Model)):
-        subscriber = ModelSubscriber(notifier_name, modelf, report_disconnect)
+                                  ("schedule", schedule.Model)):
+        subscriber = ModelSubscriber(notifier_name, modelf,
+                                     report_disconnect)
         loop.run_until_complete(subscriber.connect(
             args.server, args.port_notify))
         atexit_register_coroutine(subscriber.close, loop=loop)
@@ -227,28 +216,9 @@ def main():
     broadcast_clients["ccb"].notify_cbs.append(d_applets.ccb_notify)

     d_ttl_dds = moninj.MonInj(rpc_clients["schedule"])
+    loop.run_until_complete(d_ttl_dds.start(args.server, args.port_notify))
     atexit_register_coroutine(d_ttl_dds.stop, loop=loop)
-
-    d_waveform = waveform.WaveformDock(
-        args.analyzer_proxy_timeout,
-        args.analyzer_proxy_timer,
-        args.analyzer_proxy_timer_backoff
-    )
-    atexit_register_coroutine(d_waveform.stop, loop=loop)
-
-    def init_cbs(ddb):
-        d_ttl_dds.dm.init_ddb(ddb)
-        d_waveform.init_ddb(ddb)
-        return ddb
-    devices_sub = Subscriber("devices", init_cbs, [d_ttl_dds.dm.notify_ddb, d_waveform.notify_ddb])
-    loop.run_until_complete(devices_sub.connect(args.server, args.port_notify))
-    atexit_register_coroutine(devices_sub.close, loop=loop)
-
-    d_interactive_args = interactive_args.InteractiveArgsDock(
-        sub_clients["interactive_args"],
-        rpc_clients["interactive_arg_db"]
-    )
-
     d_schedule = schedule.ScheduleDock(
         rpc_clients["schedule"], sub_clients["schedule"])
     smgr.register(d_schedule)
@@ -262,7 +232,7 @@ def main():
     right_docks = [
         d_explorer, d_shortcuts,
         d_ttl_dds.ttl_dock, d_ttl_dds.dds_dock, d_ttl_dds.dac_dock,
-        d_datasets, d_applets, d_waveform, d_interactive_args
+        d_datasets, d_applets
     ]
     main_window.addDockWidget(QtCore.Qt.RightDockWidgetArea, right_docks[0])
     for d1, d2 in zip(right_docks, right_docks[1:]):
@@ -288,6 +258,7 @@ def main():
     if d_log0 is not None:
         main_window.tabifyDockWidget(d_schedule, d_log0)

     if server_name is not None:
         server_description = server_name + " ({})".format(args.server)
     else:
@@ -298,6 +269,5 @@ def main():
     main_window.show()
     loop.run_until_complete(main_window.exit_request.wait())

 if __name__ == "__main__":
     main()
@@ -211,11 +211,6 @@ class PeripheralManager:
         urukul_name = self.get_name("urukul")
         synchronization = peripheral["synchronization"]
         channel = count(0)
-        pll_en = peripheral["pll_en"]
-        clk_div = peripheral.get("clk_div")
-        if clk_div is None:
-            clk_div = 0 if pll_en else 1
-
         self.gen("""
             device_db["eeprom_{name}"] = {{
                 "type": "local",
@@ -282,7 +277,7 @@ class PeripheralManager:
             sync_device="\"ttl_{name}_sync\"".format(name=urukul_name) if synchronization else "None",
             refclk=peripheral.get("refclk", self.primary_description["rtio_frequency"]),
             clk_sel=peripheral["clk_sel"],
-            clk_div=clk_div)
+            clk_div=peripheral["clk_div"])
         dds = peripheral["dds"]
         pll_vco = peripheral.get("pll_vco")
         for i in range(4):
@@ -304,7 +299,7 @@ class PeripheralManager:
                 uchn=i,
                 sw=",\n \"sw_device\": \"ttl_{name}_sw{uchn}\"".format(name=urukul_name, uchn=i) if len(peripheral["ports"]) > 1 else "",
                 pll_vco=",\n \"pll_vco\": {}".format(pll_vco) if pll_vco is not None else "",
-                pll_n=peripheral.get("pll_n", 32), pll_en=pll_en,
+                pll_n=peripheral.get("pll_n", 32), pll_en=peripheral["pll_en"],
                 sync_delay_seed=",\n \"sync_delay_seed\": \"eeprom_{}:{}\"".format(urukul_name, 64 + 4*i) if synchronization else "",
                 io_update_delay=",\n \"io_update_delay\": \"eeprom_{}:{}\"".format(urukul_name, 64 + 4*i) if synchronization else "")
             elif dds == "ad9912":
@@ -325,7 +320,7 @@ class PeripheralManager:
                 uchn=i,
                 sw=",\n \"sw_device\": \"ttl_{name}_sw{uchn}\"".format(name=urukul_name, uchn=i) if len(peripheral["ports"]) > 1 else "",
                 pll_vco=",\n \"pll_vco\": {}".format(pll_vco) if pll_vco is not None else "",
-                pll_n=peripheral.get("pll_n", 8), pll_en=pll_en)
+                pll_n=peripheral.get("pll_n", 8), pll_en=peripheral["pll_en"])
             else:
                 raise ValueError
             return next(channel)
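The lines removed above derive a default clock divider from the PLL setting when the system description omits "clk_div". A standalone illustration of that rule; the peripheral dict here is a made-up fragment, not a complete Urukul entry:

# Sketch of the clk_div defaulting rule: bypass the divider when the PLL is enabled,
# divide by one otherwise, unless the description sets clk_div explicitly.
peripheral = {"type": "urukul", "dds": "ad9910", "pll_en": 0}

pll_en = peripheral["pll_en"]
clk_div = peripheral.get("clk_div")
if clk_div is None:
    clk_div = 0 if pll_en else 1
print(clk_div)   # -> 1 for pll_en=0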
@@ -15,8 +15,7 @@ from sipyco.asyncio_tools import atexit_register_coroutine, SignalHandler

 from artiq import __version__ as artiq_version
 from artiq.master.log import log_args, init_log
-from artiq.master.databases import (DeviceDB, DatasetDB,
-                                    InteractiveArgDB)
+from artiq.master.databases import DeviceDB, DatasetDB
 from artiq.master.scheduler import Scheduler
 from artiq.master.rid_counter import RIDCounter
 from artiq.master.experiments import (FilesystemBackend, GitBackend,
@@ -58,10 +57,10 @@ def get_argparser():
     log_args(parser)

     parser.add_argument("--name",
                         help="friendly name, displayed in dashboards "
                              "to identify master instead of server address")
     parser.add_argument("--log-submissions", default=None,
-                        help="log experiment submissions to specified file")
+                        help="set the filename to create the experiment subimission")

     return parser

@@ -82,7 +81,8 @@ def main():
         bind, args.port_broadcast))
     atexit_register_coroutine(server_broadcast.stop, loop=loop)

-    log_forwarder.callback = lambda msg: server_broadcast.broadcast("log", msg)
+    log_forwarder.callback = (lambda msg:
+        server_broadcast.broadcast("log", msg))
     def ccb_issue(service, *args, **kwargs):
         msg = {
             "service": service,
@@ -96,7 +96,6 @@ def main():
     atexit.register(dataset_db.close_db)
     dataset_db.start(loop=loop)
     atexit_register_coroutine(dataset_db.stop, loop=loop)
-    interactive_arg_db = InteractiveArgDB()
     worker_handlers = dict()

     if args.git:
@@ -107,21 +106,15 @@ def main():
         repo_backend, worker_handlers, args.experiment_subdir)
     atexit.register(experiment_db.close)

-    scheduler = Scheduler(RIDCounter(), worker_handlers, experiment_db,
-                          args.log_submissions)
+    scheduler = Scheduler(RIDCounter(), worker_handlers, experiment_db, args.log_submissions)
     scheduler.start(loop=loop)
     atexit_register_coroutine(scheduler.stop, loop=loop)

-    # Python doesn't allow writing attributes to bound methods.
-    def get_interactive_arguments(*args, **kwargs):
-        return interactive_arg_db.get(*args, **kwargs)
-    get_interactive_arguments._worker_pass_rid = True
     worker_handlers.update({
         "get_device_db": device_db.get_device_db,
         "get_device": device_db.get,
         "get_dataset": dataset_db.get,
         "update_dataset": dataset_db.update,
-        "get_interactive_arguments": get_interactive_arguments,
         "scheduler_submit": scheduler.submit,
         "scheduler_delete": scheduler.delete,
         "scheduler_request_termination": scheduler.request_termination,
@@ -140,11 +133,10 @@ def main():

     server_control = RPCServer({
         "master_management": master_management,
-        "device_db": device_db,
-        "dataset_db": dataset_db,
-        "interactive_arg_db": interactive_arg_db,
-        "schedule": scheduler,
-        "experiment_db": experiment_db,
+        "master_device_db": device_db,
+        "master_dataset_db": dataset_db,
+        "master_schedule": scheduler,
+        "master_experiment_db": experiment_db,
     }, allow_parallel=True)
     loop.run_until_complete(server_control.start(
         bind, args.port_control))
@@ -154,9 +146,8 @@ def main():
         "schedule": scheduler.notifier,
         "devices": device_db.data,
         "datasets": dataset_db.data,
-        "interactive_args": interactive_arg_db.pending,
         "explist": experiment_db.explist,
-        "explist_status": experiment_db.status,
+        "explist_status": experiment_db.status
     })
     loop.run_until_complete(server_notify.start(
         bind, args.port_notify))
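The removed get_interactive_arguments wrapper above exists because Python does not allow setting attributes on a bound method; wrapping the call in a plain function makes room for the _worker_pass_rid flag. A self-contained illustration, where InteractiveArgDBStub is a stand-in for the real InteractiveArgDB:

# Sketch of the wrapper pattern: attach a flag to a plain function that forwards
# to a bound method, because the bound method itself cannot carry attributes.
class InteractiveArgDBStub:
    def get(self, *args, **kwargs):     # illustrative signature
        return {}

interactive_arg_db = InteractiveArgDBStub()

def get_interactive_arguments(*args, **kwargs):
    return interactive_arg_db.get(*args, **kwargs)

get_interactive_arguments._worker_pass_rid = True    # fine: plain function
# interactive_arg_db.get._worker_pass_rid = True     # would raise AttributeError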
@@ -13,7 +13,7 @@ import h5py

 from llvmlite import binding as llvm

-from sipyco import common_args, pyon
+from sipyco import common_args

 from artiq import __version__ as artiq_version
 from artiq.language.environment import EnvExperiment, ProcessArgumentManager
@@ -166,30 +166,9 @@ def get_argparser(with_file=True):
     return parser


-class ArgumentManager(ProcessArgumentManager):
-    def get_interactive(self, interactive_arglist, title):
-        print(title)
-        result = dict()
-        for key, processor, group, tooltip in interactive_arglist:
-            success = False
-            while not success:
-                user_input = input("{}:{} (group={}, tooltip={}): ".format(
-                    key, type(processor).__name__, group, tooltip))
-                try:
-                    user_input_deser = pyon.decode(user_input)
-                    value = processor.process(user_input_deser)
-                except:
-                    logger.error("failed to process user input, retrying",
-                                 exc_info=True)
-                else:
-                    success = True
-            result[key] = value
-        return result
-
-
 def _build_experiment(device_mgr, dataset_mgr, args):
     arguments = parse_arguments(args.arguments)
-    argument_mgr = ArgumentManager(arguments)
+    argument_mgr = ProcessArgumentManager(arguments)
     managers = (device_mgr, dataset_mgr, argument_mgr, {})
     if hasattr(args, "file"):
         is_tar = tarfile.is_tarfile(args.file)
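The removed ArgumentManager.get_interactive above expects a PYON-encoded value typed at the prompt and runs it through the argument processor, retrying on failure. A tiny illustration of that round-trip; NumberProcessor is a stand-in, not the ARTIQ class:

# Sketch: decode the operator's PYON input, then let the processor validate it.
from sipyco import pyon

class NumberProcessor:                 # illustrative stand-in for an ARTIQ processor
    def process(self, x):
        return float(x)

user_input = "42"                      # what the operator would type at the prompt
value = NumberProcessor().process(pyon.decode(user_input))
print(value)                           # -> 42.0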
@@ -78,7 +78,6 @@ class DRTIOSatellite(Module):
         self.reset = CSRStorage(reset=1)
         self.reset_phy = CSRStorage(reset=1)
         self.tsc_loaded = CSR()
-        self.async_messages_ready = CSR()
         # master interface in the sys domain
         self.cri = cri.Interface()
         self.async_errors = Record(async_errors_layout)
@@ -130,9 +129,6 @@ class DRTIOSatellite(Module):
             link_layer_sync, interface=self.cri)
         self.comb += self.rt_packet.reset.eq(self.cd_rio.rst)

-        self.sync += If(self.async_messages_ready.re, self.rt_packet.async_msg_stb.eq(1))
-        self.comb += self.async_messages_ready.w.eq(self.rt_packet.async_msg_ack)
-
         self.comb += [
             tsc.load.eq(self.rt_packet.tsc_load),
             tsc.load_value.eq(self.rt_packet.tsc_load_value)
@@ -140,14 +136,14 @@ class DRTIOSatellite(Module):

         self.sync += [
             If(self.tsc_loaded.re, self.tsc_loaded.w.eq(0)),
-            If(self.rt_packet.tsc_load, self.tsc_loaded.w.eq(1)),
+            If(self.rt_packet.tsc_load, self.tsc_loaded.w.eq(1))
         ]

         self.submodules.rt_errors = rt_errors_satellite.RTErrorsSatellite(
             self.rt_packet, tsc, self.async_errors)

     def get_csrs(self):
-        return ([self.reset, self.reset_phy, self.tsc_loaded, self.async_messages_ready] +
+        return ([self.reset, self.reset_phy, self.tsc_loaded] +
                 self.link_layer.get_csrs() + self.link_stats.get_csrs() +
                 self.rt_errors.get_csrs())
@@ -17,7 +17,6 @@ class _CSRs(AutoCSR):

         self.set_time = CSR()
         self.underflow_margin = CSRStorage(16, reset=300)
-        self.async_messages_ready = CSR()

         self.force_destination = CSRStorage()
         self.destination = CSRStorage(8)
@@ -61,11 +60,6 @@ class RTController(Module):
             If(self.csrs.set_time.re, rt_packet.set_time_stb.eq(1))
         ]

-        self.sync += [
-            If(rt_packet.async_messages_ready, self.csrs.async_messages_ready.w.eq(1)),
-            If(self.csrs.async_messages_ready.re, self.csrs.async_messages_ready.w.eq(0))
-        ]
-
         # chan_sel forcing
         chan_sel = Signal(24)
         self.comb += chan_sel.eq(Mux(self.csrs.force_destination.storage,
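The removed sync blocks here and in the repeater controller below use the same CSR handshake: a hardware strobe sets a sticky status bit, and a CPU access to the CSR (its re strobe) clears it, matching the read-then-write-zero sequence in the firmware above. A minimal Migen sketch of that pattern, independent of the surrounding DRTIO gateware:

# Sketch of the sticky-flag CSR handshake in plain Migen/misoc.
from migen import Module, Signal, If
from misoc.interconnect.csr import AutoCSR, CSR

class StickyFlag(Module, AutoCSR):
    def __init__(self):
        self.flag = CSR()        # CPU-visible: .w is the value read back over the bus
        self.strobe = Signal()   # pulsed by hardware when the event occurs
        self.sync += [
            If(self.strobe, self.flag.w.eq(1)),    # set on the hardware event
            If(self.flag.re, self.flag.w.eq(0))    # clear when the CPU writes the CSR
        ]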
@@ -14,7 +14,6 @@ class RTController(Module, AutoCSR):
         self.command_missed_cmd = CSRStatus(2)
         self.command_missed_chan_sel = CSRStatus(24)
         self.buffer_space_timeout_dest = CSRStatus(8)
-        self.async_messages_ready = CSR()

         self.sync += rt_packet.reset.eq(self.reset.storage)
@@ -24,12 +23,6 @@ class RTController(Module, AutoCSR):
         ]
         self.comb += self.set_time.w.eq(rt_packet.set_time_stb)

-        self.sync += [
-            If(rt_packet.async_messages_ready, self.async_messages_ready.w.eq(1)),
-            If(self.async_messages_ready.re, self.async_messages_ready.w.eq(0))
-        ]
-
         errors = [
             (rt_packet.err_unknown_packet_type, "rtio_rx", None, None),
             (rt_packet.err_packet_truncated, "rtio_rx", None, None),
@@ -61,9 +61,6 @@ class RTPacketMaster(Module):
         # a set_time request pending
         self.tsc_value = Signal(64)

-        # async aux messages interface, only received
-        self.async_messages_ready = Signal()
-
         # rx errors
         self.err_unknown_packet_type = Signal()
         self.err_packet_truncated = Signal()
@@ -286,16 +283,12 @@ class RTPacketMaster(Module):
         echo_received_now = Signal()
         self.sync.rtio_rx += self.echo_received_now.eq(echo_received_now)

-        async_messages_ready = Signal()
-        self.sync.rtio_rx += self.async_messages_ready.eq(async_messages_ready)
-
         rx_fsm.act("INPUT",
             If(rx_dp.frame_r,
                 rx_dp.packet_buffer_load.eq(1),
                 If(rx_dp.packet_last,
                     Case(rx_dp.packet_type, {
                         rx_plm.types["echo_reply"]: echo_received_now.eq(1),
-                        rx_plm.types["async_messages_ready"]: async_messages_ready.eq(1),
                         rx_plm.types["buffer_space_reply"]: NextState("BUFFER_SPACE"),
                         rx_plm.types["read_reply"]: NextState("READ_REPLY"),
                         rx_plm.types["read_reply_noevent"]: NextState("READ_REPLY_NOEVENT"),
@@ -19,7 +19,6 @@ class RTPacketRepeater(Module):
         # in rtio_rx domain
         self.err_unknown_packet_type = Signal()
         self.err_packet_truncated = Signal()
-        self.async_messages_ready = Signal()

         # in rtio domain
         self.err_command_missed = Signal()
@@ -305,7 +304,6 @@ class RTPacketRepeater(Module):
             rx_dp.packet_buffer_load.eq(1),
             If(rx_dp.packet_last,
                 Case(rx_dp.packet_type, {
-                    rx_plm.types["async_messages_ready"]: self.async_messages_ready.eq(1),
                     rx_plm.types["buffer_space_reply"]: NextState("BUFFER_SPACE"),
                     rx_plm.types["read_reply"]: NextState("READ_REPLY"),
                     rx_plm.types["read_reply_noevent"]: NextState("READ_REPLY_NOEVENT"),
@@ -333,4 +331,4 @@ class RTPacketRepeater(Module):
                 read_not.eq(1),
                 read_no_event.eq(1),
                 NextState("INPUT")
             )
@@ -19,9 +19,6 @@ class RTPacketSatellite(Module):
         self.tsc_load = Signal()
         self.tsc_load_value = Signal(64)

-        self.async_msg_stb = Signal()
-        self.async_msg_ack = Signal()
-
         if interface is None:
             interface = cri.Interface()
         self.cri = interface
@@ -81,8 +78,6 @@ class RTPacketSatellite(Module):
             )
         ]

-        self.sync += If(self.async_msg_ack, self.async_msg_stb.eq(0))
-
         # RX FSM
         cri_read = Signal()
         cri_buffer_space = Signal()
@@ -202,7 +197,6 @@ class RTPacketSatellite(Module):

         tx_fsm.act("IDLE",
             If(echo_req, NextState("ECHO")),
-            If(self.async_msg_stb, NextState("ASYNC_MESSAGES_READY")),
             If(buffer_space_req, NextState("BUFFER_SPACE")),
             If(read_request_pending & ~self.cri.i_status[2],
                 NextState("READ"),
@@ -216,12 +210,6 @@ class RTPacketSatellite(Module):
             If(tx_dp.packet_last, NextState("IDLE"))
         )

-        tx_fsm.act("ASYNC_MESSAGES_READY",
-            self.async_msg_ack.eq(1),
-            tx_dp.send("async_messages_ready"),
-            If(tx_dp.packet_last, NextState("IDLE"))
-        )
-
         tx_fsm.act("BUFFER_SPACE",
             buffer_space_ack.eq(1),
             tx_dp.send("buffer_space_reply", space=buffer_space),
@@ -69,7 +69,6 @@ def get_s2m_layouts(alignment):

     plm.add_type("read_reply", ("timestamp", 64), ("data", 32))
     plm.add_type("read_reply_noevent", ("overflow", 1))  # overflow=0→timeout
-    plm.add_type("async_messages_ready")

     return plm
@@ -7,7 +7,6 @@ import os
 import subprocess
 from functools import partial
 from itertools import count
-from types import SimpleNamespace

 from PyQt5 import QtCore, QtGui, QtWidgets
@@ -15,16 +14,43 @@ from sipyco.pipe_ipc import AsyncioParentComm
 from sipyco.logging_tools import LogParser
 from sipyco import pyon

-from artiq.gui.entries import procdesc_to_entry, EntryTreeWidget
-from artiq.gui.tools import QDockWidgetCloseDetect, LayoutWidget
+from artiq.gui.entries import procdesc_to_entry
+from artiq.gui.tools import (QDockWidgetCloseDetect, LayoutWidget,
+                             WheelFilter)


 logger = logging.getLogger(__name__)


-class EntryArea(EntryTreeWidget):
+class EntryArea(QtWidgets.QTreeWidget):
     def __init__(self):
-        EntryTreeWidget.__init__(self)
+        QtWidgets.QTreeWidget.__init__(self)
+        self.setColumnCount(3)
+        self.header().setStretchLastSection(False)
+        if hasattr(self.header(), "setSectionResizeMode"):
+            set_resize_mode = self.header().setSectionResizeMode
+        else:
+            set_resize_mode = self.header().setResizeMode
+        set_resize_mode(0, QtWidgets.QHeaderView.ResizeToContents)
+        set_resize_mode(1, QtWidgets.QHeaderView.Stretch)
+        self.header().setVisible(False)
+        self.setSelectionMode(self.NoSelection)
+        self.setHorizontalScrollMode(self.ScrollPerPixel)
+        self.setVerticalScrollMode(self.ScrollPerPixel)
+
+        self.setStyleSheet("QTreeWidget {background: " +
+                           self.palette().midlight().color().name() + " ;}")
+
+        self.viewport().installEventFilter(WheelFilter(self.viewport(), True))
+
+        self._groups = dict()
+        self._arg_to_widgets = dict()
+        self._arguments = dict()
+
+        self.gradient = QtGui.QLinearGradient(
+            0, 0, 0, QtGui.QFontMetrics(self.font()).lineSpacing()*2.5)
+        self.gradient.setColorAt(0, self.palette().base().color())
+        self.gradient.setColorAt(1, self.palette().midlight().color())
+
         reset_all_button = QtWidgets.QPushButton("Restore defaults")
         reset_all_button.setToolTip("Reset all to default values")
         reset_all_button.setIcon(
@@ -36,57 +62,125 @@ class EntryArea(EntryTreeWidget):
         buttons.layout.setColumnStretch(1, 0)
         buttons.layout.setColumnStretch(2, 1)
         buttons.addWidget(reset_all_button, 0, 1)
+        self.bottom_item = QtWidgets.QTreeWidgetItem()
+        self.addTopLevelItem(self.bottom_item)
         self.setItemWidget(self.bottom_item, 1, buttons)
-        self._processors = dict()
+        self.bottom_item.setHidden(True)

-    def setattr_argument(self, key, processor, group=None, tooltip=None):
+    def setattr_argument(self, name, proc, group=None, tooltip=None):
         argument = dict()
-        desc = processor.describe()
-        self._processors[key] = processor
+        desc = proc.describe()
         argument["desc"] = desc
         argument["group"] = group
         argument["tooltip"] = tooltip
-        self.set_argument(key, argument)
+        self._arguments[name] = argument
+        widgets = dict()
+        self._arg_to_widgets[name] = widgets
+        entry_class = procdesc_to_entry(argument["desc"])
+        argument["state"] = entry_class.default_state(argument["desc"])
+        entry = entry_class(argument)
+        widget_item = QtWidgets.QTreeWidgetItem([name])
+        if argument["tooltip"]:
+            widget_item.setToolTip(0, argument["tooltip"])
+        widgets["entry"] = entry
+        widgets["widget_item"] = widget_item

-    def __getattr__(self, key):
-        return self.get_value(key)
+        if len(self._arguments) > 1:
+            self.bottom_item.setHidden(False)

-    def get_value(self, key):
-        entry = self._arg_to_widgets[key]["entry"]
-        argument = self._arguments[key]
-        processor = self._processors[key]
-        return processor.process(entry.state_to_value(argument["state"]))
+        for col in range(3):
+            widget_item.setBackground(col, self.gradient)
+        font = widget_item.font(0)
+        font.setBold(True)
+        widget_item.setFont(0, font)

-    def set_value(self, key, value):
-        ty = self._arguments[key]["desc"]["ty"]
+        if argument["group"] is None:
+            self.insertTopLevelItem(self.indexFromItem(self.bottom_item).row(), widget_item)
+        else:
+            self._get_group(argument["group"]).addChild(widget_item)
+        self.bottom_item.setHidden(False)
+        fix_layout = LayoutWidget()
+        widgets["fix_layout"] = fix_layout
+        fix_layout.addWidget(entry)
+        self.setItemWidget(widget_item, 1, fix_layout)
+
+        reset_value = QtWidgets.QToolButton()
+        reset_value.setToolTip("Reset to default value")
+        reset_value.setIcon(
+            QtWidgets.QApplication.style().standardIcon(
+                QtWidgets.QStyle.SP_BrowserReload))
+        reset_value.clicked.connect(partial(self.reset_value, name))
+
+        tool_buttons = LayoutWidget()
+        tool_buttons.addWidget(reset_value, 0)
+        self.setItemWidget(widget_item, 2, tool_buttons)
+
+    def _get_group(self, name):
+        if name in self._groups:
+            return self._groups[name]
+        group = QtWidgets.QTreeWidgetItem([name])
+        for col in range(3):
+            group.setBackground(col, self.palette().mid())
+            group.setForeground(col, self.palette().brightText())
+            font = group.font(col)
+            font.setBold(True)
+            group.setFont(col, font)
+        self.insertTopLevelItem(self.indexFromItem(self.bottom_item).row(), group)
+        self._groups[name] = group
+        return group
+
+    def __getattr__(self, name):
+        return self.get_value(name)
+
+    def get_value(self, name):
+        entry = self._arg_to_widgets[name]["entry"]
+        argument = self._arguments[name]
+        return entry.state_to_value(argument["state"])
+
+    def set_value(self, name, value):
+        ty = self._arguments[name]["desc"]["ty"]
         if ty == "Scannable":
             desc = value.describe()
-            self._arguments[key]["state"][desc["ty"]] = desc
-            self._arguments[key]["state"]["selected"] = desc["ty"]
+            self._arguments[name]["state"][desc["ty"]] = desc
+            self._arguments[name]["state"]["selected"] = desc["ty"]
         else:
-            self._arguments[key]["state"] = value
-        self.update_value(key)
+            self._arguments[name]["state"] = value
+        self.update_value(name)

     def get_values(self):
-        d = SimpleNamespace()
-        for key in self._arguments.keys():
-            setattr(d, key, self.get_value(key))
+        d = dict()
+        for name in self._arguments.keys():
+            d[name] = self.get_value(name)
         return d

     def set_values(self, values):
-        for key, value in values.items():
-            self.set_value(key, value)
+        for name, value in values.items():
+            self.set_value(name, value)

-    def update_value(self, key):
-        argument = self._arguments[key]
-        self.update_argument(key, argument)
+    def update_value(self, name):
+        widgets = self._arg_to_widgets[name]
+        argument = self._arguments[name]

-    def reset_value(self, key):
-        self.reset_entry(key)
+        # Qt needs a setItemWidget() to handle layout correctly,
+        # simply replacing the entry inside the LayoutWidget
+        # results in a bug.
+
+        widgets["entry"].deleteLater()
+        widgets["entry"] = procdesc_to_entry(argument["desc"])(argument)
+        widgets["fix_layout"].deleteLater()
+        widgets["fix_layout"] = LayoutWidget()
+        widgets["fix_layout"].addWidget(widgets["entry"])
+        self.setItemWidget(widgets["widget_item"], 1, widgets["fix_layout"])
+        self.updateGeometries()
+
+    def reset_value(self, name):
+        procdesc = self._arguments[name]["desc"]
+        self._arguments[name]["state"] = procdesc_to_entry(procdesc).default_state(procdesc)
+        self.update_value(name)

     def reset_all(self):
-        for key in self._arguments.keys():
-            self.reset_entry(key)
+        for name in self._arguments.keys():
+            self.reset_value(name)


 class AppletIPCServer(AsyncioParentComm):
@@ -161,7 +255,7 @@ class AppletIPCServer(AsyncioParentComm):
         elif action == "update_dataset":
             await self.dataset_ctl.update(obj["mod"])
         elif action == "set_argument_value":
-            self.expmgr.set_argument_value(obj["expurl"], obj["key"], obj["value"])
+            self.expmgr.set_argument_value(obj["expurl"], obj["name"], obj["value"])
         else:
             raise ValueError("unknown action in applet message")
     except:
@@ -1,100 +0,0 @@
-from PyQt5 import QtCore, QtWidgets
-
-
-class VDragDropSplitter(QtWidgets.QSplitter):
-    dropped = QtCore.pyqtSignal(int, int)
-
-    def __init__(self, parent):
-        QtWidgets.QSplitter.__init__(self, parent=parent)
-        self.setAcceptDrops(True)
-        self.setContentsMargins(0, 0, 0, 0)
-        self.setOrientation(QtCore.Qt.Vertical)
-        self.setChildrenCollapsible(False)
-
-    def resetSizes(self):
-        self.setSizes(self.count() * [1])
-
-    def dragEnterEvent(self, e):
-        e.accept()
-
-    def dragLeaveEvent(self, e):
-        self.setRubberBand(-1)
-        e.accept()
-
-    def dragMoveEvent(self, e):
-        pos = e.pos()
-        src = e.source()
-        src_i = self.indexOf(src)
-        self.setRubberBand(self.height())
-        # case 0: smaller than source widget
-        if pos.y() < src.y():
-            for n in range(src_i):
-                w = self.widget(n)
-                if pos.y() < w.y() + w.size().height():
-                    self.setRubberBand(w.y())
-                    break
-        # case 2: greater than source widget
-        elif pos.y() > src.y() + src.size().height():
-            for n in range(src_i + 1, self.count()):
-                w = self.widget(n)
-                if pos.y() < w.y():
-                    self.setRubberBand(w.y())
-                    break
-        else:
-            self.setRubberBand(-1)
-        e.accept()
-
-    def dropEvent(self, e):
-        self.setRubberBand(-1)
-        pos = e.pos()
-        src = e.source()
-        src_i = self.indexOf(src)
-        for n in range(self.count()):
-            w = self.widget(n)
-            if pos.y() < w.y() + w.size().height():
-                self.dropped.emit(src_i, n)
-                break
-        e.accept()
-
-
-# Scroll area with auto-scroll on vertical drag
-class VDragScrollArea(QtWidgets.QScrollArea):
-    def __init__(self, parent):
-        QtWidgets.QScrollArea.__init__(self, parent)
-        self.installEventFilter(self)
-        self._margin = 40
-        self._timer = QtCore.QTimer(self)
-        self._timer.setInterval(20)
-        self._timer.timeout.connect(self._on_auto_scroll)
-        self._direction = 0
-        self._speed = 10
-
-    def setAutoScrollMargin(self, margin):
-        self._margin = margin
-
-    def setAutoScrollSpeed(self, speed):
-        self._speed = speed
-
-    def eventFilter(self, obj, e):
-        if e.type() == QtCore.QEvent.DragMove:
-            val = self.verticalScrollBar().value()
-            height = self.viewport().height()
-            y = e.pos().y()
-            self._direction = 0
-            if y < val + self._margin:
-                self._direction = -1
-            elif y > height + val - self._margin:
-                self._direction = 1
-            if not self._timer.isActive():
-                self._timer.start()
-        elif e.type() in (QtCore.QEvent.Drop, QtCore.QEvent.DragLeave):
-            self._timer.stop()
-        return False
-
-    def _on_auto_scroll(self):
-        val = self.verticalScrollBar().value()
-        min_ = self.verticalScrollBar().minimum()
-        max_ = self.verticalScrollBar().maximum()
-        dy = self._direction * self._speed
-        new_val = min(max_, max(min_, val + dy))
-        self.verticalScrollBar().setValue(new_val)
@@ -1,10 +1,9 @@
 import logging
 from collections import OrderedDict
-from functools import partial

 from PyQt5 import QtCore, QtGui, QtWidgets

-from artiq.gui.tools import LayoutWidget, disable_scroll_wheel, WheelFilter
+from artiq.gui.tools import LayoutWidget, disable_scroll_wheel
 from artiq.gui.scanwidget import ScanWidget
 from artiq.gui.scientific_spinbox import ScientificSpinBox
@ -12,157 +11,6 @@ from artiq.gui.scientific_spinbox import ScientificSpinBox
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class EntryTreeWidget(QtWidgets.QTreeWidget):
|
|
||||||
quickStyleClicked = QtCore.pyqtSignal()
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
QtWidgets.QTreeWidget.__init__(self)
|
|
||||||
self.setColumnCount(3)
|
|
||||||
self.header().setStretchLastSection(False)
|
|
||||||
if hasattr(self.header(), "setSectionResizeMode"):
|
|
||||||
set_resize_mode = self.header().setSectionResizeMode
|
|
||||||
else:
|
|
||||||
set_resize_mode = self.header().setResizeMode
|
|
||||||
set_resize_mode(0, QtWidgets.QHeaderView.ResizeToContents)
|
|
||||||
set_resize_mode(1, QtWidgets.QHeaderView.Stretch)
|
|
||||||
set_resize_mode(2, QtWidgets.QHeaderView.ResizeToContents)
|
|
||||||
self.header().setVisible(False)
|
|
||||||
self.setSelectionMode(self.NoSelection)
|
|
||||||
self.setHorizontalScrollMode(self.ScrollPerPixel)
|
|
||||||
self.setVerticalScrollMode(self.ScrollPerPixel)
|
|
||||||
|
|
||||||
self.setStyleSheet("QTreeWidget {background: " +
|
|
||||||
self.palette().midlight().color().name() + " ;}")
|
|
||||||
|
|
||||||
self.viewport().installEventFilter(WheelFilter(self.viewport(), True))
|
|
||||||
|
|
||||||
self._groups = dict()
|
|
||||||
self._arg_to_widgets = dict()
|
|
||||||
self._arguments = dict()
|
|
||||||
|
|
||||||
self.gradient = QtGui.QLinearGradient(
|
|
||||||
0, 0, 0, QtGui.QFontMetrics(self.font()).lineSpacing() * 2.5)
|
|
||||||
self.gradient.setColorAt(0, self.palette().base().color())
|
|
||||||
self.gradient.setColorAt(1, self.palette().midlight().color())
|
|
||||||
|
|
||||||
self.bottom_item = QtWidgets.QTreeWidgetItem()
|
|
||||||
self.addTopLevelItem(self.bottom_item)
|
|
||||||
|
|
||||||
def set_argument(self, key, argument):
|
|
||||||
self._arguments[key] = argument
|
|
||||||
widgets = dict()
|
|
||||||
self._arg_to_widgets[key] = widgets
|
|
||||||
entry_class = procdesc_to_entry(argument["desc"])
|
|
||||||
argument["state"] = entry_class.default_state(argument["desc"])
|
|
||||||
entry = entry_class(argument)
|
|
||||||
if argument["desc"].get("quickstyle"):
|
|
||||||
entry.quickStyleClicked.connect(self.quickStyleClicked)
|
|
||||||
widget_item = QtWidgets.QTreeWidgetItem([key])
|
|
||||||
if argument["tooltip"]:
|
|
||||||
widget_item.setToolTip(0, argument["tooltip"])
|
|
||||||
widgets["entry"] = entry
|
|
||||||
widgets["widget_item"] = widget_item
|
|
||||||
|
|
||||||
for col in range(3):
|
|
||||||
widget_item.setBackground(col, self.gradient)
|
|
||||||
font = widget_item.font(0)
|
|
||||||
font.setBold(True)
|
|
||||||
widget_item.setFont(0, font)
|
|
||||||
|
|
||||||
if argument["group"] is None:
|
|
||||||
self.insertTopLevelItem(self.indexFromItem(self.bottom_item).row(), widget_item)
|
|
||||||
else:
|
|
||||||
self._get_group(argument["group"]).addChild(widget_item)
|
|
||||||
fix_layout = LayoutWidget()
|
|
||||||
widgets["fix_layout"] = fix_layout
|
|
||||||
fix_layout.addWidget(entry)
|
|
||||||
self.setItemWidget(widget_item, 1, fix_layout)
|
|
||||||
|
|
||||||
reset_entry = QtWidgets.QToolButton()
|
|
||||||
reset_entry.setToolTip("Reset to default value")
|
|
||||||
reset_entry.setIcon(
|
|
||||||
QtWidgets.QApplication.style().standardIcon(
|
|
||||||
QtWidgets.QStyle.SP_BrowserReload))
|
|
||||||
reset_entry.clicked.connect(partial(self.reset_entry, key))
|
|
||||||
|
|
||||||
disable_other_scans = QtWidgets.QToolButton()
|
|
||||||
widgets["disable_other_scans"] = disable_other_scans
|
|
||||||
disable_other_scans.setIcon(
|
|
||||||
QtWidgets.QApplication.style().standardIcon(
|
|
||||||
QtWidgets.QStyle.SP_DialogResetButton))
|
|
||||||
disable_other_scans.setToolTip("Disable other scans")
|
|
||||||
disable_other_scans.clicked.connect(
|
|
||||||
partial(self._disable_other_scans, key))
|
|
||||||
if not isinstance(entry, ScanEntry):
|
|
||||||
disable_other_scans.setVisible(False)
|
|
||||||
|
|
||||||
tool_buttons = LayoutWidget()
|
|
||||||
tool_buttons.layout.setRowStretch(0, 1)
|
|
||||||
tool_buttons.layout.setRowStretch(3, 1)
|
|
||||||
tool_buttons.addWidget(reset_entry, 1)
|
|
||||||
tool_buttons.addWidget(disable_other_scans, 2)
|
|
||||||
self.setItemWidget(widget_item, 2, tool_buttons)
|
|
||||||
|
|
||||||
def _get_group(self, key):
|
|
||||||
if key in self._groups:
|
|
||||||
return self._groups[key]
|
|
||||||
group = QtWidgets.QTreeWidgetItem([key])
|
|
||||||
for col in range(3):
|
|
||||||
group.setBackground(col, self.palette().mid())
|
|
||||||
group.setForeground(col, self.palette().brightText())
|
|
||||||
font = group.font(col)
|
|
||||||
font.setBold(True)
|
|
||||||
group.setFont(col, font)
|
|
||||||
self.insertTopLevelItem(self.indexFromItem(self.bottom_item).row(), group)
|
|
||||||
self._groups[key] = group
|
|
||||||
return group
|
|
||||||
|
|
||||||
def _disable_other_scans(self, current_key):
|
|
||||||
for key, widgets in self._arg_to_widgets.items():
|
|
||||||
if (key != current_key and isinstance(widgets["entry"], ScanEntry)):
|
|
||||||
widgets["entry"].disable()
|
|
||||||
|
|
||||||
def update_argument(self, key, argument):
|
|
||||||
widgets = self._arg_to_widgets[key]
|
|
||||||
|
|
||||||
# Qt needs a setItemWidget() to handle layout correctly,
|
|
||||||
# simply replacing the entry inside the LayoutWidget
|
|
||||||
# results in a bug.
|
|
||||||
|
|
||||||
widgets["entry"].deleteLater()
|
|
||||||
widgets["entry"] = procdesc_to_entry(argument["desc"])(argument)
|
|
||||||
widgets["disable_other_scans"].setVisible(
|
|
||||||
isinstance(widgets["entry"], ScanEntry))
|
|
||||||
widgets["fix_layout"].deleteLater()
|
|
||||||
widgets["fix_layout"] = LayoutWidget()
|
|
||||||
widgets["fix_layout"].addWidget(widgets["entry"])
|
|
||||||
self.setItemWidget(widgets["widget_item"], 1, widgets["fix_layout"])
|
|
||||||
self.updateGeometries()
|
|
||||||
|
|
||||||
def reset_entry(self, key):
|
|
||||||
procdesc = self._arguments[key]["desc"]
|
|
||||||
self._arguments[key]["state"] = procdesc_to_entry(procdesc).default_state(procdesc)
|
|
||||||
self.update_argument(key, self._arguments[key])
|
|
||||||
|
|
||||||
def save_state(self):
|
|
||||||
expanded = []
|
|
||||||
for k, v in self._groups.items():
|
|
||||||
if v.isExpanded():
|
|
||||||
expanded.append(k)
|
|
||||||
return {
|
|
||||||
"expanded": expanded,
|
|
||||||
"scroll": self.verticalScrollBar().value()
|
|
||||||
}
|
|
||||||
|
|
||||||
def restore_state(self, state):
|
|
||||||
for e in state["expanded"]:
|
|
||||||
try:
|
|
||||||
self._groups[e].setExpanded(True)
|
|
||||||
except KeyError:
|
|
||||||
pass
|
|
||||||
self.verticalScrollBar().setValue(state["scroll"])
|
|
||||||
|
|
||||||
|
|
||||||
class StringEntry(QtWidgets.QLineEdit):
|
class StringEntry(QtWidgets.QLineEdit):
|
||||||
def __init__(self, argument):
|
def __init__(self, argument):
|
||||||
QtWidgets.QLineEdit.__init__(self)
|
QtWidgets.QLineEdit.__init__(self)
|
||||||
|
@@ -197,38 +45,17 @@ class BooleanEntry(QtWidgets.QCheckBox):
         return procdesc.get("default", False)


-class EnumerationEntry(QtWidgets.QWidget):
-    quickStyleClicked = QtCore.pyqtSignal()
-
+class EnumerationEntry(QtWidgets.QComboBox):
     def __init__(self, argument):
-        QtWidgets.QWidget.__init__(self)
-        layout = QtWidgets.QHBoxLayout()
-        self.setLayout(layout)
-        procdesc = argument["desc"]
-        choices = procdesc["choices"]
-        if procdesc["quickstyle"]:
-            self.btn_group = QtWidgets.QButtonGroup()
-            for i, choice in enumerate(choices):
-                button = QtWidgets.QPushButton(choice)
-                self.btn_group.addButton(button)
-                self.btn_group.setId(button, i)
-                layout.addWidget(button)
-
-            def submit(index):
-                argument["state"] = choices[index]
-                self.quickStyleClicked.emit()
-            self.btn_group.idClicked.connect(submit)
-        else:
-            self.combo_box = QtWidgets.QComboBox()
-            disable_scroll_wheel(self.combo_box)
-            self.combo_box.addItems(choices)
-            idx = choices.index(argument["state"])
-            self.combo_box.setCurrentIndex(idx)
-            layout.addWidget(self.combo_box)
-
-            def update(index):
-                argument["state"] = choices[index]
-            self.combo_box.currentIndexChanged.connect(update)
+        QtWidgets.QComboBox.__init__(self)
+        disable_scroll_wheel(self)
+        choices = argument["desc"]["choices"]
+        self.addItems(choices)
+        idx = choices.index(argument["state"])
+        self.setCurrentIndex(idx)
+
+        def update(index):
+            argument["state"] = choices[index]
+        self.currentIndexChanged.connect(update)

     @staticmethod
     def state_to_value(state):
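For context, a minimal sketch of how the branch-side ``quickstyle`` option is exercised from an experiment; the experiment name, argument name and choices are invented for illustration and follow the ``EnumerationValue(choices, default, quickstyle)`` signature shown further below: ::

    from artiq.experiment import EnvExperiment, EnumerationValue

    class QuickStyleDemo(EnvExperiment):
        def build(self):
            # With quickstyle=True the dashboard renders the choices as buttons
            # that submit the experiment on click instead of a combo box.
            self.setattr_argument("mode", EnumerationValue(
                ["calibrate", "measure", "idle"], quickstyle=True))

        def run(self):
            print(self.mode)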
@@ -69,23 +69,6 @@ async def get_open_file_name(parent, caption, dir, filter):
     return await fut


-async def get_save_file_name(parent, caption, dir, filter, suffix=None):
-    """like QtWidgets.QFileDialog.getSaveFileName(), but a coroutine"""
-    dialog = QtWidgets.QFileDialog(parent, caption, dir, filter)
-    dialog.setFileMode(dialog.AnyFile)
-    dialog.setAcceptMode(dialog.AcceptSave)
-    if suffix is not None:
-        dialog.setDefaultSuffix(suffix)
-    fut = asyncio.Future()
-
-    def on_accept():
-        fut.set_result(dialog.selectedFiles()[0])
-    dialog.accepted.connect(on_accept)
-    dialog.rejected.connect(fut.cancel)
-    dialog.open()
-    return await fut
-
-
 # Based on:
 # http://stackoverflow.com/questions/250890/using-qsortfilterproxymodel-with-a-tree-model
 class QRecursiveFilterProxyModel(QtCore.QSortFilterProxyModel):
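A hedged usage sketch for the removed coroutine dialog, awaited from a Qt/asyncio event loop; the parent widget, caption and filter strings are placeholders, not taken from the diff: ::

    # Prompts for a save location and resolves with the chosen path,
    # or raises CancelledError if the dialog is rejected.
    filename = await get_save_file_name(
        main_window, "Save results", ".", "HDF5 files (*.h5)", suffix="h5")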
@@ -72,8 +72,8 @@ def subkernel(arg=None, destination=0, flags={}):
    Subkernels behave similarly to kernels, with few key differences:

    - they are started from main kernels,
-    - they do not support RPCs,
-    - but they can call other kernels or subkernels.
+    - they do not support RPCs, or running subsequent subkernels on other devices,
+    - but they can call other kernels or subkernels with the same destination.

    Subkernels can accept arguments and return values. However, they must be fully
    annotated with ARTIQ types.
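A minimal sketch of a fully annotated subkernel and a kernel awaiting its result, in the style of the manual example further below; the function names and the satellite destination are illustrative only: ::

    from artiq.experiment import *

    @subkernel(destination=1)
    def double(x: TInt32) -> TInt32:
        # Runs on the satellite; arguments and return value carry ARTIQ types.
        return x * 2

    @kernel
    def run_on_master():
        double(2)
        result = subkernel_await(double)
        assert result == 4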
@@ -1,7 +1,5 @@
 from collections import OrderedDict
 from inspect import isclass
-from contextlib import contextmanager
-from types import SimpleNamespace

 from sipyco import pyon

@@ -12,8 +10,7 @@ from artiq.language.core import rpc
 __all__ = ["NoDefault", "DefaultMissing",
            "PYONValue", "BooleanValue", "EnumerationValue",
            "NumberValue", "StringValue",
-           "HasEnvironment", "Experiment", "EnvExperiment",
-           "CancelledArgsError"]
+           "HasEnvironment", "Experiment", "EnvExperiment"]


 class NoDefault:
@@ -27,12 +24,6 @@ class DefaultMissing(Exception):
     pass


-class CancelledArgsError(Exception):
-    """Raised by the ``interactive`` context manager when an interactive
-    arguments request is cancelled."""
-    pass
-
-
 class _SimpleArgProcessor:
     def __init__(self, default=NoDefault):
         # If default is a list, it means multiple defaults are specified, with
@@ -89,12 +80,9 @@ class EnumerationValue(_SimpleArgProcessor):

     :param choices: A list of string representing the possible values of the
         argument.
-    :param quickstyle: Enables the choices to be displayed in the GUI as a
-        list of buttons that submit the experiment when clicked.
     """
-    def __init__(self, choices, default=NoDefault, quickstyle=False):
+    def __init__(self, choices, default=NoDefault):
         self.choices = choices
-        self.quickstyle = quickstyle
         super().__init__(default)

     def process(self, x):
@@ -105,7 +93,6 @@ class EnumerationValue(_SimpleArgProcessor):
     def describe(self):
         d = _SimpleArgProcessor.describe(self)
         d["choices"] = self.choices
-        d["quickstyle"] = self.quickstyle
         return d


@@ -225,9 +212,6 @@ class TraceArgumentManager:
         self.requested_args[key] = processor, group, tooltip
         return None

-    def get_interactive(self, interactive_arglist, title):
-        raise NotImplementedError
-

 class ProcessArgumentManager:
     def __init__(self, unprocessed_arguments):
@@ -249,10 +233,6 @@ class ProcessArgumentManager:
         raise AttributeError("Supplied argument(s) not queried in experiment: " +
                              ", ".join(unprocessed))

-    def get_interactive(self, interactive_arglist, title):
-        raise NotImplementedError
-

 class HasEnvironment:
     """Provides methods to manage the environment of an experiment (arguments,
     devices, datasets)."""
@@ -342,33 +322,6 @@ class HasEnvironment:
         kernel_invariants = getattr(self, "kernel_invariants", set())
         self.kernel_invariants = kernel_invariants | {key}

-    @contextmanager
-    def interactive(self, title=""):
-        """Request arguments from the user interactively.
-
-        This context manager returns a namespace object on which the method
-        `setattr_argument` should be called, with the usual semantics.
-
-        When the context manager terminates, the experiment is blocked
-        and the user is presented with the requested argument widgets.
-        After the user enters values, the experiment is resumed and
-        the namespace contains the values of the arguments.
-
-        If the interactive arguments request is cancelled, raises
-        ``CancelledArgsError``."""
-        interactive_arglist = []
-        namespace = SimpleNamespace()
-        def setattr_argument(key, processor=None, group=None, tooltip=None):
-            interactive_arglist.append((key, processor, group, tooltip))
-        namespace.setattr_argument = setattr_argument
-        yield namespace
-        del namespace.setattr_argument
-        argdict = self.__argument_mgr.get_interactive(interactive_arglist, title)
-        if argdict is None:
-            raise CancelledArgsError
-        for key, value in argdict.items():
-            setattr(namespace, key, value)
-
     def get_device_db(self):
         """Returns the full contents of the device database."""
         return self.__device_mgr.get_device_db()
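A hedged sketch of how the removed ``interactive()`` helper was meant to be used from an experiment, following the docstring above; the title and argument names are invented: ::

    from artiq.experiment import *

    class InteractiveDemo(EnvExperiment):
        def build(self):
            pass

        def run(self):
            try:
                # The experiment blocks here until the user fills in the
                # requested widgets in the dashboard.
                with self.interactive(title="Fit range") as inter:
                    inter.setattr_argument("start", NumberValue(0.0))
                    inter.setattr_argument("stop", NumberValue(1.0))
            except CancelledArgsError:
                print("interactive request cancelled")
                return
            # After the block, the namespace carries the entered values.
            print(inter.start, inter.stop)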
@@ -2,8 +2,7 @@ import asyncio

 import lmdb

-from sipyco.sync_struct import (Notifier, process_mod, ModAction,
-                                update_from_dict)
+from sipyco.sync_struct import Notifier, process_mod, ModAction, update_from_dict
 from sipyco import pyon
 from sipyco.asyncio_tools import TaskObject

@@ -61,13 +60,12 @@ class DatasetDB(TaskObject):
     def save(self):
         with self.lmdb.begin(write=True) as txn:
             for key in self.pending_keys:
-                if (key not in self.data.raw_view
-                        or not self.data.raw_view[key][0]):
+                if key not in self.data.raw_view or not self.data.raw_view[key][0]:
                     txn.delete(key.encode())
                 else:
                     value_and_metadata = (self.data.raw_view[key][1],
                                           self.data.raw_view[key][2])
                     txn.put(key.encode(),
                             pyon.encode(value_and_metadata).encode())
         self.pending_keys.clear()

@@ -89,8 +87,7 @@ class DatasetDB(TaskObject):
         if mod["path"]:
             key = mod["path"][0]
         else:
-            assert (mod["action"] == ModAction.setitem.value
-                    or mod["action"] == ModAction.delitem.value)
+            assert(mod["action"] == ModAction.setitem.value or mod["action"] == ModAction.delitem.value)
             key = mod["key"]
         self.pending_keys.add(key)
         process_mod(self.data, mod)

@@ -114,34 +111,3 @@ class DatasetDB(TaskObject):
         del self.data[key]
         self.pending_keys.add(key)
     #
-
-
-class InteractiveArgDB:
-    def __init__(self):
-        self.pending = Notifier(dict())
-        self.futures = dict()
-
-    async def get(self, rid, arglist_desc, title):
-        self.pending[rid] = {"title": title, "arglist_desc": arglist_desc}
-        self.futures[rid] = asyncio.get_running_loop().create_future()
-        try:
-            value = await self.futures[rid]
-        finally:
-            del self.pending[rid]
-            del self.futures[rid]
-        return value
-
-    def supply(self, rid, values):
-        # quick sanity checks
-        if rid not in self.futures or self.futures[rid].done():
-            raise ValueError("no experiment with this RID is "
-                             "waiting for interactive arguments")
-        if {i[0] for i in self.pending.raw_view[rid]["arglist_desc"]} != set(values.keys()):
-            raise ValueError("supplied and requested keys do not match")
-        self.futures[rid].set_result(values)
-
-    def cancel(self, rid):
-        if rid not in self.futures or self.futures[rid].done():
-            raise ValueError("no experiment with this RID is "
-                             "waiting for interactive arguments")
-        self.futures[rid].set_result(None)
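A toy asyncio sketch of the request/supply pattern that the removed ``InteractiveArgDB`` implements: one task awaits ``get()`` while another supplies (or cancels) the result. The RID, argument name and sleep are invented for the example: ::

    import asyncio

    async def demo():
        db = InteractiveArgDB()

        async def experiment_side():
            # Blocks until the GUI side supplies values (or None on cancel).
            return await db.get(rid=0,
                                arglist_desc=[("amplitude", {}, None, None)],
                                title="demo")

        async def gui_side():
            await asyncio.sleep(0.1)          # pretend the user typed something
            db.supply(0, {"amplitude": 0.5})

        values, _ = await asyncio.gather(experiment_side(), gui_side())
        print(values)                          # {'amplitude': 0.5}

    asyncio.run(demo())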
@@ -111,7 +111,7 @@ class ExperimentDB:
         try:
             if new_cur_rev is None:
                 new_cur_rev = self.repo_backend.get_head_rev()
-            wd, _, _ = self.repo_backend.request_rev(new_cur_rev)
+            wd, _ = self.repo_backend.request_rev(new_cur_rev)
             self.repo_backend.release_rev(self.cur_rev)
             self.cur_rev = new_cur_rev
             self.status["cur_rev"] = new_cur_rev
@@ -132,7 +132,7 @@ class ExperimentDB:
         if use_repository:
             if revision is None:
                 revision = self.cur_rev
-            wd, _, revision = self.repo_backend.request_rev(revision)
+            wd, _ = self.repo_backend.request_rev(revision)
             filename = os.path.join(wd, filename)
         worker = Worker(self.worker_handlers)
         try:
@@ -169,7 +169,7 @@ class FilesystemBackend:
         return "N/A"

     def request_rev(self, rev):
-        return self.root, None, "N/A"
+        return self.root, None

     def release_rev(self, rev):
         pass
@@ -200,26 +200,14 @@ class GitBackend:
     def get_head_rev(self):
         return str(self.git.head.target)

-    def _get_pinned_rev(self, rev):
-        """
-        Resolve a git reference (e.g. "HEAD", "master", "abcdef123456...") into
-        a git hash
-        """
-        commit, _ = self.git.resolve_refish(rev)
-
-        logger.debug('Resolved git ref "%s" into "%s"', rev, commit.hex)
-
-        return commit.hex
-
     def request_rev(self, rev):
-        rev = self._get_pinned_rev(rev)
         if rev in self.checkouts:
             co = self.checkouts[rev]
             co.ref_count += 1
         else:
             co = _GitCheckout(self.git, rev)
             self.checkouts[rev] = co
-        return co.path, co.message, rev
+        return co.path, co.message

     def release_rev(self, rev):
         co = self.checkouts[rev]
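For reference, a standalone sketch of the ref-pinning idea using pygit2, the library the ``GitBackend`` wraps; the repository path is a placeholder and ``str(commit.id)`` is used here instead of the deprecated ``commit.hex``: ::

    import pygit2

    repo = pygit2.Repository("/path/to/repository")
    commit, _ = repo.resolve_refish("master")   # branch, tag or hash all work
    print(str(commit.id))                        # pinned, unambiguous commit hash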
@@ -132,23 +132,15 @@ class RunPool:
         writer.writerow([rid, start_time, expid["file"]])

     def submit(self, expid, priority, due_date, flush, pipeline_name):
-        """
-        Submits an experiment to be run by this pool
-
-        If expid has the attribute `repo_rev`, treat it as a git revision or
-        reference and resolve into a unique git hash before submission
-        """
         # mutates expid to insert head repository revision if None and
         # replaces relative path with the absolute one.
         # called through scheduler.
         rid = self.ridc.get()
         if "repo_rev" in expid:
-            repo_rev_or_ref = expid["repo_rev"] or self.experiment_db.cur_rev
-            wd, repo_msg, repo_rev = self.experiment_db.repo_backend.request_rev(repo_rev_or_ref)
-            # Mutate expid's repo_rev to that returned from request_rev, in case
-            # a branch was passed instead of a hash
-            expid["repo_rev"] = repo_rev
+            if expid["repo_rev"] is None:
+                expid["repo_rev"] = self.experiment_db.cur_rev
+            wd, repo_msg = self.experiment_db.repo_backend.request_rev(
+                expid["repo_rev"])
         else:
             if "file" in expid:
                 expid["file"] = os.path.abspath(expid["file"])
@@ -156,7 +148,7 @@ class RunPool:

         run = Run(rid, pipeline_name, wd, expid, priority, due_date, flush,
                   self, repo_msg=repo_msg)
         if self.log_submissions is not None:
             self.log_submission(rid, expid)
         self.runs[rid] = run
         self.state_changed.notify()
@@ -239,7 +231,7 @@ class PrepareStage(TaskObject):
             try:
                 await run.build()
                 await run.prepare()
-            except Exception:
+            except:
                 logger.error("got worker exception in prepare stage, "
                              "deleting RID %d", run.rid)
                 log_worker_exception()
@@ -289,7 +281,7 @@ class RunStage(TaskObject):
                 else:
                     run.status = RunStatus.running
                     completed = await run.run()
-            except Exception:
+            except:
                 logger.error("got worker exception in run stage, "
                              "deleting RID %d", run.rid)
                 log_worker_exception()
@@ -326,7 +318,7 @@ class AnalyzeStage(TaskObject):
             run.status = RunStatus.analyzing
             try:
                 await run.analyze()
-            except Exception:
+            except:
                 logger.error("got worker exception in analyze stage of RID %d.",
                              run.rid)
                 log_worker_exception()
@@ -510,7 +502,8 @@ class Scheduler:
         """Returns ``True`` if termination is requested."""
         for pipeline in self._pipelines.values():
             if rid in pipeline.pool.runs:
                 run = pipeline.pool.runs[rid]
                 if run.termination_requested:
                     return True
         return False
@@ -226,15 +226,9 @@ class Worker:
             else:
                 func = self.handlers[action]
                 try:
-                    if getattr(func, "_worker_pass_rid", False):
-                        args = [self.rid] + list(obj["args"])
-                    else:
-                        args = obj["args"]
-                    data = func(*args, **obj["kwargs"])
-                    if asyncio.iscoroutine(data):
-                        data = await data
+                    data = func(*obj["args"], **obj["kwargs"])
                     reply = {"status": "ok", "data": data}
-                except Exception:
+                except:
                     reply = {
                         "status": "failed",
                         "exception": current_exc_packed()
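A hedged sketch of the marker-attribute pattern the branch-side dispatch relies on: handlers flagged with ``_worker_pass_rid`` receive the run's RID as their first argument. The decorator and handler names below are invented for illustration; only the attribute name comes from the diff above: ::

    def worker_pass_rid(func):
        # Marks a master-side handler so the Worker prepends self.rid to args.
        func._worker_pass_rid = True
        return func

    @worker_pass_rid
    def get_interactive_arguments(rid, arglist_desc, title):
        # Example of a handler that needs to know which RID is asking.
        ...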
@@ -28,10 +28,10 @@ from artiq.master.worker_db import DeviceManager, DatasetManager, DummyDevice
 from artiq.language.environment import (
     is_public_experiment, TraceArgumentManager, ProcessArgumentManager
 )
-from artiq.language.core import host_only, set_watchdog_factory, TerminationRequested
+from artiq.language.core import set_watchdog_factory, TerminationRequested
 from artiq.language.types import TBool
 from artiq.compiler import import_cache
-from artiq.coredevice.core import CompileError, _render_diagnostic
+from artiq.coredevice.core import CompileError, host_only, _render_diagnostic
 from artiq import __version__ as artiq_version


@@ -200,7 +200,9 @@ def examine(device_mgr, dataset_mgr, file):
             name = name[:-1]
         argument_mgr = TraceArgumentManager()
         scheduler_defaults = {}
-        exp_class((device_mgr, dataset_mgr, argument_mgr, scheduler_defaults))
+        cls = exp_class(  # noqa: F841 (fill argument_mgr)
+            (device_mgr, dataset_mgr, argument_mgr, scheduler_defaults)
+        )
         arginfo = OrderedDict(
             (k, (proc.describe(), group, tooltip))
             for k, (proc, group, tooltip) in argument_mgr.requested_args.items()
@@ -215,19 +217,6 @@ def examine(device_mgr, dataset_mgr, file):
         del sys.modules[key]


-class ArgumentManager(ProcessArgumentManager):
-    _get_interactive = make_parent_action("get_interactive_arguments")
-
-    def get_interactive(self, interactive_arglist, title):
-        arglist_desc = [(k, p.describe(), g, t)
-                        for k, p, g, t in interactive_arglist]
-        arguments = ArgumentManager._get_interactive(arglist_desc, title)
-        if arguments is not None:
-            for key, processor, _, _ in interactive_arglist:
-                arguments[key] = processor.process(arguments[key])
-        return arguments
-
-
 def setup_diagnostics(experiment_file, repository_path):
     def render_diagnostic(self, diagnostic):
         message = "While compiling {}\n".format(experiment_file) + \
@@ -336,11 +325,11 @@ def main():
                 rid, obj["pipeline_name"], expid, obj["priority"])
             start_local_time = time.localtime(start_time)
             dirname = os.path.join("results",
                                    time.strftime("%Y-%m-%d", start_local_time),
                                    time.strftime("%H", start_local_time))
             os.makedirs(dirname, exist_ok=True)
             os.chdir(dirname)
-            argument_mgr = ArgumentManager(expid["arguments"])
+            argument_mgr = ProcessArgumentManager(expid["arguments"])
             exp_inst = exp((device_mgr, dataset_mgr, argument_mgr, {}))
             argument_mgr.check_unprocessed_arguments()
             put_completed()
@@ -1,22 +0,0 @@
-# RUN: env ARTIQ_DUMP_LLVM=%t %python -m artiq.compiler.testbench.embedding +compile %s
-# RUN: OutputCheck %s --file-to-check=%t.ll
-
-from artiq.language.core import *
-from artiq.language.types import *
-
-@kernel
-def entrypoint():
-    # CHECK: call void @subkernel_load_run\(i32 1, i8 1, i1 true\), !dbg !.
-    message_pass()
-    # CHECK-NOT: call void @subkernel_send_message\(i32 ., i1 false, i8 1, i8 1, .*\), !dbg !.
-    # CHECK: call i8 @subkernel_await_message\(i32 2, i64 -1, { i8\*, i32 }\* nonnull .*, i8 1, i8 1\), !dbg !.
-    subkernel_recv("message", TInt32)
-
-
-# CHECK-L: declare void @subkernel_load_run(i32, i8, i1) local_unnamed_addr
-# CHECK-NOT-L: declare void @subkernel_send_message(i32, i1, i8, i8, { i8*, i32 }*, i8**) local_unnamed_addr
-# CHECK-L: declare i8 @subkernel_await_message(i32, i64, { i8*, i32 }*, i8, i8) local_unnamed_addr
-# CHECK-NOT-L: declare void @subkernel_await_finish(i32, i64) local_unnamed_addr
-@subkernel(destination=1)
-def message_pass() -> TNone:
-    subkernel_send(0, "message", 15)
@@ -1,20 +0,0 @@
-# RUN: env ARTIQ_DUMP_LLVM=%t %python -m artiq.compiler.testbench.embedding +compile %s
-# RUN: OutputCheck %s --file-to-check=%t.ll
-
-from artiq.language.core import *
-from artiq.language.types import *
-
-@kernel
-def entrypoint():
-    # CHECK: call void @subkernel_load_run\(i32 1, i8 1, i1 true\), !dbg !.
-    message_pass()
-    # CHECK: call void @subkernel_send_message\(i32 2, i1 false, i8 1, i8 1, .*\), !dbg !.
-    # CHECK-NOT: call i8 @subkernel_await_message\(i32 1, i64 10000, { i8\*, i32 }\* nonnull .*, i8 1, i8 1\), !dbg !.
-    subkernel_send(1, "message", 15)
-
-
-# CHECK-L: declare void @subkernel_load_run(i32, i8, i1) local_unnamed_addr
-# CHECK-L: declare void @subkernel_send_message(i32, i1, i8, i8, { i8*, i32 }*, i8**) local_unnamed_addr
-@subkernel(destination=1)
-def message_pass() -> TNone:
-    subkernel_recv("message", TInt32)
@ -6,13 +6,13 @@ from artiq.language.types import *
|
||||||
|
|
||||||
@kernel
|
@kernel
|
||||||
def entrypoint():
|
def entrypoint():
|
||||||
# CHECK: call void @subkernel_load_run\(i32 1, i8 1, i1 true\), !dbg !.
|
# CHECK: call void @subkernel_load_run\(i32 1, i1 true\), !dbg !.
|
||||||
# CHECK-NOT: call void @subkernel_send_message\(.*\), !dbg !.
|
# CHECK-NOT: call void @subkernel_send_message\(.*\), !dbg !.
|
||||||
no_arg()
|
no_arg()
|
||||||
|
|
||||||
|
|
||||||
# CHECK-L: declare void @subkernel_load_run(i32, i8, i1) local_unnamed_addr
|
# CHECK-L: declare void @subkernel_load_run(i32, i1) local_unnamed_addr
|
||||||
# CHECK-NOT-L: declare void @subkernel_send_message(i32, i1, i8, { i8*, i32 }*, i8**) local_unnamed_addr
|
# CHECK-NOT-L: declare void @subkernel_send_message(i32, { i8*, i32 }*, i8**) local_unnamed_addr
|
||||||
@subkernel(destination=1)
|
@subkernel(destination=1)
|
||||||
def no_arg() -> TStr:
|
def no_arg() -> TStr:
|
||||||
pass
|
pass
|
||||||
|
|
|
@ -6,15 +6,15 @@ from artiq.language.types import *
|
||||||
|
|
||||||
@kernel
|
@kernel
|
||||||
def entrypoint():
|
def entrypoint():
|
||||||
# CHECK: call void @subkernel_load_run\(i32 1, i8 1, i1 true\), !dbg !.
|
# CHECK: call void @subkernel_load_run\(i32 1, i1 true\), !dbg !.
|
||||||
# CHECK-NOT: call void @subkernel_send_message\(.*\), !dbg !.
|
# CHECK-NOT: call void @subkernel_send_message\(.*\), !dbg !.
|
||||||
returning()
|
returning()
|
||||||
# CHECK: call i8 @subkernel_await_message\(i32 1, i64 -1, { i8\*, i32 }\* nonnull .*, i8 1, i8 1\), !dbg !.
|
# CHECK: call i8 @subkernel_await_message\(i32 1, i64 10000, { i8\*, i32 }\* nonnull .*, i8 1, i8 1\), !dbg !.
|
||||||
# CHECK: call void @subkernel_await_finish\(i32 1, i64 -1\), !dbg !.
|
# CHECK: call void @subkernel_await_finish\(i32 1, i64 10000\), !dbg !.
|
||||||
subkernel_await(returning)
|
subkernel_await(returning)
|
||||||
|
|
||||||
# CHECK-L: declare void @subkernel_load_run(i32, i8, i1) local_unnamed_addr
|
# CHECK-L: declare void @subkernel_load_run(i32, i1) local_unnamed_addr
|
||||||
# CHECK-NOT-L: declare void @subkernel_send_message(i32, i1, i8, i8, { i8*, i32 }*, i8**) local_unnamed_addr
|
# CHECK-NOT-L: declare void @subkernel_send_message(i32, i8, { i8*, i32 }*, i8**) local_unnamed_addr
|
||||||
# CHECK-L: declare i8 @subkernel_await_message(i32, i64, { i8*, i32 }*, i8, i8) local_unnamed_addr
|
# CHECK-L: declare i8 @subkernel_await_message(i32, i64, { i8*, i32 }*, i8, i8) local_unnamed_addr
|
||||||
# CHECK-L: declare void @subkernel_await_finish(i32, i64) local_unnamed_addr
|
# CHECK-L: declare void @subkernel_await_finish(i32, i64) local_unnamed_addr
|
||||||
@subkernel(destination=1)
|
@subkernel(destination=1)
|
||||||
|
|
|
@ -6,15 +6,15 @@ from artiq.language.types import *
|
||||||
|
|
||||||
@kernel
|
@kernel
|
||||||
def entrypoint():
|
def entrypoint():
|
||||||
# CHECK: call void @subkernel_load_run\(i32 1, i8 1, i1 true\), !dbg !.
|
# CHECK: call void @subkernel_load_run\(i32 1, i1 true\), !dbg !.
|
||||||
# CHECK-NOT: call void @subkernel_send_message\(.*\), !dbg !.
|
# CHECK-NOT: call void @subkernel_send_message\(.*\), !dbg !.
|
||||||
returning_none()
|
returning_none()
|
||||||
# CHECK: call void @subkernel_await_finish\(i32 1, i64 -1\), !dbg !.
|
# CHECK: call void @subkernel_await_finish\(i32 1, i64 10000\), !dbg !.
|
||||||
# CHECK-NOT: call i8 @subkernel_await_message\(i32 1, i64 -1\, .*\), !dbg !.
|
# CHECK-NOT: call i8 @subkernel_await_message\(i32 1, i64 10000\, .*\), !dbg !.
|
||||||
subkernel_await(returning_none)
|
subkernel_await(returning_none)
|
||||||
|
|
||||||
# CHECK-L: declare void @subkernel_load_run(i32, i8, i1) local_unnamed_addr
|
# CHECK-L: declare void @subkernel_load_run(i32, i1) local_unnamed_addr
|
||||||
# CHECK-NOT-L: declare void @subkernel_send_message(i32, i1, i8, { i8*, i32 }*, i8**) local_unnamed_addr
|
# CHECK-NOT-L: declare void @subkernel_send_message(i32, { i8*, i32 }*, i8**) local_unnamed_addr
|
||||||
# CHECK-L: declare void @subkernel_await_finish(i32, i64) local_unnamed_addr
|
# CHECK-L: declare void @subkernel_await_finish(i32, i64) local_unnamed_addr
|
||||||
# CHECK-NOT-L: declare i8 @subkernel_await_message(i32, i64, { i8*, i32 }*, i8, i8) local_unnamed_addr
|
# CHECK-NOT-L: declare i8 @subkernel_await_message(i32, i64, { i8*, i32 }*, i8, i8) local_unnamed_addr
|
||||||
@subkernel(destination=1)
|
@subkernel(destination=1)
|
||||||
|
|
|
@ -11,7 +11,7 @@ class A:
|
||||||
|
|
||||||
@kernel
|
@kernel
|
||||||
def kernel_entrypoint(self):
|
def kernel_entrypoint(self):
|
||||||
# CHECK: call void @subkernel_load_run\(i32 1, i8 1, i1 true\), !dbg !.
|
# CHECK: call void @subkernel_load_run\(i32 1, i1 true\), !dbg !.
|
||||||
# CHECK-NOT: call void @subkernel_send_message\(.*\), !dbg !.
|
# CHECK-NOT: call void @subkernel_send_message\(.*\), !dbg !.
|
||||||
self.sk()
|
self.sk()
|
||||||
|
|
||||||
|
@ -21,5 +21,5 @@ a = A()
|
||||||
def entrypoint():
|
def entrypoint():
|
||||||
a.kernel_entrypoint()
|
a.kernel_entrypoint()
|
||||||
|
|
||||||
# CHECK-L: declare void @subkernel_load_run(i32, i8, i1) local_unnamed_addr
|
# CHECK-L: declare void @subkernel_load_run(i32, i1) local_unnamed_addr
|
||||||
# CHECK-NOT-L: declare void @subkernel_send_message(i32, i1, i8, i8, { i8*, i32 }*, i8**) local_unnamed_addr
|
# CHECK-NOT-L: declare void @subkernel_send_message(i32, i8, { i8*, i32 }*, i8**) local_unnamed_addr
|
||||||
|
|
|
@ -11,8 +11,8 @@ class A:
|
||||||
|
|
||||||
@kernel
|
@kernel
|
||||||
def kernel_entrypoint(self):
|
def kernel_entrypoint(self):
|
||||||
# CHECK: call void @subkernel_load_run\(i32 1, i8 1, i1 true\), !dbg !.
|
# CHECK: call void @subkernel_load_run\(i32 1, i1 true\), !dbg !.
|
||||||
# CHECK: call void @subkernel_send_message\(i32 1, i1 false, i8 1, i8 1, .*\), !dbg !.
|
# CHECK: call void @subkernel_send_message\(i32 1, i8 1, .*\), !dbg !.
|
||||||
self.sk(1)
|
self.sk(1)
|
||||||
|
|
||||||
a = A()
|
a = A()
|
||||||
|
@ -21,5 +21,5 @@ a = A()
|
||||||
def entrypoint():
|
def entrypoint():
|
||||||
a.kernel_entrypoint()
|
a.kernel_entrypoint()
|
||||||
|
|
||||||
# CHECK-L: declare void @subkernel_load_run(i32, i8, i1) local_unnamed_addr
|
# CHECK-L: declare void @subkernel_load_run(i32, i1) local_unnamed_addr
|
||||||
# CHECK-L: declare void @subkernel_send_message(i32, i1, i8, i8, { i8*, i32 }*, i8**) local_unnamed_addr
|
# CHECK-L: declare void @subkernel_send_message(i32, i8, { i8*, i32 }*, i8**) local_unnamed_addr
|
||||||
|
|
|
@ -6,13 +6,13 @@ from artiq.language.types import *
|
||||||
|
|
||||||
@kernel
|
@kernel
|
||||||
def entrypoint():
|
def entrypoint():
|
||||||
# CHECK: call void @subkernel_load_run\(i32 1, i8 1, i1 true\), !dbg !.
|
# CHECK: call void @subkernel_load_run\(i32 1, i1 true\), !dbg !.
|
||||||
# CHECK: call void @subkernel_send_message\(i32 ., i1 false, i8 1, i8 1, .*\), !dbg !.
|
# CHECK: call void @subkernel_send_message\(i32 ., i8 1, .*\), !dbg !.
|
||||||
accept_arg(1)
|
accept_arg(1)
|
||||||
|
|
||||||
|
|
||||||
# CHECK-L: declare void @subkernel_load_run(i32, i8, i1) local_unnamed_addr
|
# CHECK-L: declare void @subkernel_load_run(i32, i1) local_unnamed_addr
|
||||||
# CHECK-L: declare void @subkernel_send_message(i32, i1, i8, i8, { i8*, i32 }*, i8**) local_unnamed_addr
|
# CHECK-L: declare void @subkernel_send_message(i32, i8, { i8*, i32 }*, i8**) local_unnamed_addr
|
||||||
@subkernel(destination=1)
|
@subkernel(destination=1)
|
||||||
def accept_arg(arg: TInt32) -> TNone:
|
def accept_arg(arg: TInt32) -> TNone:
|
||||||
pass
|
pass
|
||||||
|
|
|
@ -6,16 +6,16 @@ from artiq.language.types import *
|
||||||
|
|
||||||
@kernel
|
@kernel
|
||||||
def entrypoint():
|
def entrypoint():
|
||||||
# CHECK: call void @subkernel_load_run\(i32 1, i8 1, i1 true\), !dbg !.
|
# CHECK: call void @subkernel_load_run\(i32 1, i1 true\), !dbg !.
|
||||||
# CHECK: call void @subkernel_send_message\(i32 ., i1 false, i8 1, i8 1, .*\), !dbg !.
|
# CHECK: call void @subkernel_send_message\(i32 ., i8 1, .*\), !dbg !.
|
||||||
accept_arg(1)
|
accept_arg(1)
|
||||||
# CHECK: call void @subkernel_load_run\(i32 1, i8 1, i1 true\), !dbg !.
|
# CHECK: call void @subkernel_load_run\(i32 1, i1 true\), !dbg !.
|
||||||
# CHECK: call void @subkernel_send_message\(i32 ., i1 false, i8 1, i8 2, .*\), !dbg !.
|
# CHECK: call void @subkernel_send_message\(i32 ., i8 2, .*\), !dbg !.
|
||||||
accept_arg(1, 2)
|
accept_arg(1, 2)
|
||||||
|
|
||||||
|
|
||||||
# CHECK-L: declare void @subkernel_load_run(i32, i8, i1) local_unnamed_addr
|
# CHECK-L: declare void @subkernel_load_run(i32, i1) local_unnamed_addr
|
||||||
# CHECK-L: declare void @subkernel_send_message(i32, i1, i8, i8, { i8*, i32 }*, i8**) local_unnamed_addr
|
# CHECK-L: declare void @subkernel_send_message(i32, i8, { i8*, i32 }*, i8**) local_unnamed_addr
|
||||||
@subkernel(destination=1)
|
@subkernel(destination=1)
|
||||||
def accept_arg(arg_a, arg_b=5) -> TNone:
|
def accept_arg(arg_a, arg_b=5) -> TNone:
|
||||||
pass
|
pass
|
||||||
|
|
|
@@ -9,7 +9,7 @@ class TestFrontends(unittest.TestCase):
     """Test --help as a simple smoke test against catastrophic breakage."""
     commands = {
         "aqctl": [
-            "corelog", "moninj_proxy", "coreanalyzer_proxy"
+            "corelog", "moninj_proxy"
         ],
         "artiq": [
            "client", "compile", "coreanalyzer", "coremgmt",
@@ -34,9 +34,7 @@ mock_modules = ["artiq.gui.waitingspinnerwidget",
                 "artiq.gui.models",
                 "artiq.compiler.module",
                 "artiq.compiler.embedding",
-                "artiq.dashboard.waveform",
-                "artiq.dashboard.interactive_args",
-                "qasync", "pyqtgraph", "matplotlib", "lmdb",
+                "qasync", "pyqtgraph", "matplotlib",
                 "numpy", "dateutil", "dateutil.parser", "prettytable", "PyQt5",
                 "h5py", "serial", "scipy", "scipy.interpolate",
                 "llvmlite", "Levenshtein", "pythonparser",
@@ -97,7 +95,7 @@ master_doc = 'index'

 # General information about the project.
 project = 'ARTIQ'
-copyright = '2014-2024, M-Labs Limited'
+copyright = '2014-2023, M-Labs Limited'

 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
@@ -275,7 +275,7 @@ Subkernels refer to kernels running on a satellite device. This allows you to of

 Subkernels behave in most part as regular kernels, they accept arguments and can return values. However, there are few caveats:

-- they do not support RPCs,
+- they do not support RPCs or calling subsequent subkernels on other devices,
 - they do not support DRTIO,
 - their return value must be fully annotated with an ARTIQ type,
 - their arguments should be annotated, and only basic ARTIQ types are supported,
@@ -304,13 +304,13 @@ For example, a subkernel performing integer addition: ::
     result = subkernel_await(subkernel_add)
     assert result == 4

-Sometimes the subkernel execution may take more time. By default, the await function will wait forever. However, if timeout is needed it can be set, as ``subkernel_await()`` accepts an optional argument. The value is interpreted in milliseconds and if it is negative, timeout is disabled.
+Sometimes the subkernel execution may take more time - and the await has a default timeout of 10000 milliseconds (10 seconds). It can be adjusted, as ``subkernel_await()`` accepts an optional timeout argument.

 Subkernels are compiled after the main kernel, and then immediately uploaded to satellites. When called, master instructs the appropriate satellite to load the subkernel into their kernel core and to run it. If the subkernel is complex, and its binary relatively big, the delay between the call and actually running the subkernel may be substantial; if that delay has to be minimized, ``subkernel_preload(function)`` should be used before the call.

 While ``self`` is accepted as an argument for subkernels, it is embedded into the compiled data. Any changes made by the main kernel or other subkernels, will not be available.

-Subkernels can call other kernels and subkernels. For a more complex example: ::
+Subkernels can call other kernels and subkernels, if they're within the same destination. For a more complex example: ::

     from artiq.experiment import *

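To make the timeout argument concrete, a hedged illustration reusing the manual's ``subkernel_add`` example; the 5000 ms value is arbitrary and the snippet is meant to sit inside the experiment class: ::

    @kernel
    def run(self):
        subkernel_add(2, 2)
        # Give the satellite at most 5 s (value in milliseconds) to return.
        result = subkernel_await(subkernel_add, 5000)
        assert result == 4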
@ -346,35 +346,4 @@ Without the preload, the delay after the core reset would need to be longer. It'
|
||||||
In general, subkernels do not have to be awaited, but awaiting is required to retrieve returned values and exceptions.
|
In general, subkernels do not have to be awaited, but awaiting is required to retrieve returned values and exceptions.
|
||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
When a subkernel is running, regardless of devices used by it, RTIO devices on that satellite are not available to the master. Control is returned to master after the subkernel finishes - to be sure that you can use the device, the subkernel should be awaited before any RTIO operations on the affected satellite are performed.
|
When a subkernel is running, regardless of devices used by it, RTIO devices on that satellite are not available to the master. Control is returned to master after the subkernel finishes - to be sure that you can use the device, the subkernel should be awaited before any RTIO operations on the affected satellite are performed.
|
||||||
|
|
||||||
Message passing
|
|
||||||
^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
Subkernels besides arguments and returns, can also pass messages between each other or the master with built-in ``subkernel_send()`` and ``subkernel_recv()`` functions. This can be used for communication between subkernels, passing additional data, or partially computed data. Consider the following example: ::
|
|
||||||
|
|
||||||
from artiq.experiment import *
|
|
||||||
|
|
||||||
@subkernel(destination=1)
|
|
||||||
def simple_message() -> TInt32:
|
|
||||||
data = subkernel_recv("message", TInt32)
|
|
||||||
return data + 20
|
|
||||||
|
|
||||||
class MessagePassing(EnvExperiment):
|
|
||||||
def build(self):
|
|
||||||
self.setattr_device("core")
|
|
||||||
|
|
||||||
@kernel
|
|
||||||
def run(self):
|
|
||||||
simple_self()
|
|
||||||
subkernel_send(1, "message", 150)
|
|
||||||
result = subkernel_await(simple_self)
|
|
||||||
assert result == 170
|
|
||||||
|
|
||||||
The ``subkernel_send(destination, name, value)`` function takes three arguments: the destination, the name of the message (which links it to the matching ``subkernel_recv()``), and the value to pass.

The ``subkernel_recv(name, type, [timeout])`` function requires two arguments: the message name (matching the name given to ``subkernel_send()``) and the expected type. Optionally, it accepts a third argument, a timeout for the operation in milliseconds; a negative value disables the timeout, and no timeout is the default.

The "name" argument in both the ``send`` and ``recv`` functions acts as a link, and must match exactly between the two for a successful message transaction. The type of the value sent by ``subkernel_send`` is checked against the type declared in ``subkernel_recv`` with the same name, to avoid misinterpretation of the data. The compiler also checks that every subkernel message name has both a sending and a receiving function, to help catch typos. It cannot help if the wrong names are used, however - the receiver will wait only for a matching message, for the duration of the timeout.

A message can be received only while a subkernel is running, and incoming messages are buffered until they are taken, so the sending order cannot cause a deadlock. However, a subkernel may time out or wait forever if the destination or the names do not match (e.g. the message was sent to the wrong destination, or under a different name than expected, even if the types match).
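To illustrate the optional timeout, a minimal sketch; the message name and the 100 ms value are arbitrary: ::

    @subkernel(destination=1)
    def recv_with_timeout() -> TInt32:
        # Wait at most 100 ms for a message named "data"; a negative
        # timeout (the default) would wait indefinitely.
        return subkernel_recv("data", TInt32, 100)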
@ -27,4 +27,4 @@ Website: https://m-labs.hk/artiq

`Cite ARTIQ <http://dx.doi.org/10.5281/zenodo.51303>`_ as ``Bourdeauducq, Sébastien et al. (2016). ARTIQ 1.0. Zenodo. 10.5281/zenodo.51303``.

Copyright (C) 2014-2024 M-Labs Limited. Licensed under GNU LGPL version 3+.
Copyright (C) 2014-2023 M-Labs Limited. Licensed under GNU LGPL version 3+.
@ -157,10 +157,11 @@ Embedded applets should use `AppletRequestIPC` while standalone applets use `App

Applet entry area
*****************
Argument widgets can be used in applets through the `EntryArea` class.

Below is a simple example code snippet using the `EntryArea` class: ::

    # Create the experiment area
    entry_area = EntryArea()

    # Create a new widget
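As a rough sketch of how such a snippet might continue, assuming the `setattr_argument`/`set_value` interface of `EntryArea`; the argument name `bl` is hypothetical: ::

    # Create a new widget (hypothetical argument name "bl")
    entry_area.setattr_argument("bl", BooleanValue(True))

    # Read the current value of the widget
    current = entry_area.bl

    # Programmatically change the widget value
    entry_area.set_value("bl", False)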
flake.lock
@ -11,11 +11,11 @@
]
},
"locked": {
"lastModified": 1707216368,
"lastModified": 1701573753,
"narHash": "sha256-ZXoqzG2QsVsybALLYXs473avXcyKSZNh2kIgcPo60XQ=",
"narHash": "sha256-vhEtXjb9AM6/HnsgfVmhJQeqQ9JqysUm7iWNzTIbexs=",
"owner": "m-labs",
"repo": "artiq-comtools",
"rev": "e5d0204490bccc07ef9141b0d7c405ab01cb8273",
"rev": "199bdabf4de49cb7ada8a4ac7133008e0f8434b7",
"type": "github"
},
"original": {
@ -45,11 +45,11 @@
"mozilla-overlay": {
"flake": false,
"locked": {
"lastModified": 1704373101,
"lastModified": 1695805681,
"narHash": "sha256-+gi59LRWRQmwROrmE1E2b3mtocwueCQqZ60CwLG+gbg=",
"narHash": "sha256-1ElPLD8eFfnuIk0G52HGGpRtQZ4QPCjChRlEOfkZ5ro=",
"owner": "mozilla",
"repo": "nixpkgs-mozilla",
"rev": "9b11a87c0cc54e308fa83aac5b4ee1816d5418a2",
"rev": "6eabade97bc28d707a8b9d82ad13ef143836736e",
"type": "github"
},
"original": {
@ -60,11 +60,11 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1711668574,
"lastModified": 1702346276,
"narHash": "sha256-u1dfs0ASQIEr1icTVrsKwg2xToIpn7ZXxW3RHfHxshg=",
"narHash": "sha256-eAQgwIWApFQ40ipeOjVSoK4TEHVd6nbSd9fApiHIw5A=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "219951b495fc2eac67b1456824cc1ec1fd2ee659",
"rev": "cf28ee258fd5f9a52de6b9865cdb93a1f96d09b7",
"type": "github"
},
"original": {
@ -92,11 +92,11 @@
]
},
"locked": {
"lastModified": 1708937641,
"lastModified": 1701572254,
"narHash": "sha256-Hkb9VYFzFgkYxfbh4kYcDSn7DbMUYehoQDeTALrxo2Q=",
"narHash": "sha256-ixq8dlpyOytDr+d/OmW8v1Ioy9V2G2ibOlNj8GFDSq4=",
"owner": "m-labs",
"repo": "sipyco",
"rev": "4a28b311ce0069454b4e8fe1e6049db11b9f1296",
"rev": "cceac0df537887135f99aa6b1bdd82853f16b4d6",
"type": "github"
},
"original": {
@ -108,11 +108,11 @@
"src-migen": {
"flake": false,
"locked": {
"lastModified": 1702942348,
"lastModified": 1699335478,
"narHash": "sha256-gKIfHZxsv+jcgDFRW9mPqmwqbZXuRvXefkZcSFjOGHw=",
"narHash": "sha256-BsubN4Mfdj02QPK6ZCrl+YOaSg7DaLQdSCVP49ztWik=",
"owner": "m-labs",
"repo": "migen",
"rev": "50934ad10a87ade47219b796535978b9bdf24023",
"rev": "fd0bf5855a1367eab14b0d6f7f8266178e25d78e",
"type": "github"
},
"original": {
flake.nix
@ -92,6 +92,22 @@
disabledTestPaths = [ "tests/test_qeventloop.py" ];
};

outputcheck = pkgs.python3Packages.buildPythonApplication rec {
pname = "outputcheck";
version = "0.4.2";
src = pkgs.fetchFromGitHub {
owner = "stp";
repo = "OutputCheck";
rev = "e0f533d3c5af2949349856c711bf4bca50022b48";
sha256 = "1y27vz6jq6sywas07kz3v01sqjd0sga9yv9w2cksqac3v7wmf2a0";
};
prePatch = "echo ${version} > RELEASE-VERSION";
postPatch = ''
substituteInPlace OutputCheck/Driver.py \
--replace "argparse.FileType('rU')" "argparse.FileType('r')"
'';
};

libartiq-support = pkgs.stdenv.mkDerivation {
name = "libartiq-support";
src = self;
@ -171,7 +187,7 @@
# FIXME: automatically propagate lld_14 llvm_14 dependencies
# cacert is required in the check stage only, as certificates are to be
# obtained from system elsewhere
nativeCheckInputs = with pkgs; [ lld_14 llvm_14 lit outputcheck cacert ] ++ [ libartiq-support ];
nativeCheckInputs = [ pkgs.lld_14 pkgs.llvm_14 libartiq-support pkgs.lit outputcheck pkgs.cacert ];
checkPhase = ''
python -m unittest discover -v artiq.test
@ -335,6 +351,17 @@
paths = [ openocd-fixed bscan_spi_bitstreams-pkg ];
};

sphinxcontrib-wavedrom = pkgs.python3Packages.buildPythonPackage rec {
pname = "sphinxcontrib-wavedrom";
version = "3.0.4";
format = "pyproject";
src = pkgs.python3Packages.fetchPypi {
inherit pname version;
sha256 = "sha256-0zTHVBr9kXwMEo4VRTFsxdX2HI31DxdHfLUHCQmw1Ko=";
};
nativeBuildInputs = [ pkgs.python3Packages.setuptools-scm ];
propagatedBuildInputs = (with pkgs.python3Packages; [ wavedrom sphinx xcffib cairosvg ]);
};

latex-artiq-manual = pkgs.texlive.combine {
inherit (pkgs.texlive)
scheme-basic latexmk cmap collection-fontsrecommended fncychap
@ -368,14 +395,14 @@
target = "efc";
variant = "shuttler";
};
inherit latex-artiq-manual;
inherit sphinxcontrib-wavedrom latex-artiq-manual;
artiq-manual-html = pkgs.stdenvNoCC.mkDerivation rec {
name = "artiq-manual-html-${version}";
version = artiqVersion;
src = self;
buildInputs = with pkgs.python3Packages; [
buildInputs = [
sphinx sphinx_rtd_theme
pkgs.python3Packages.sphinx pkgs.python3Packages.sphinx_rtd_theme
sphinx-argparse sphinxcontrib-wavedrom
pkgs.python3Packages.sphinx-argparse sphinxcontrib-wavedrom
];
buildPhase = ''
export VERSIONEER_OVERRIDE=${artiqVersion}
@ -393,10 +420,11 @@
name = "artiq-manual-pdf-${version}";
version = artiqVersion;
src = self;
buildInputs = with pkgs.python3Packages; [
buildInputs = [
sphinx sphinx_rtd_theme
pkgs.python3Packages.sphinx pkgs.python3Packages.sphinx_rtd_theme
sphinx-argparse sphinxcontrib-wavedrom
pkgs.python3Packages.sphinx-argparse sphinxcontrib-wavedrom
] ++ [ latex-artiq-manual ];
latex-artiq-manual
];
buildPhase = ''
export VERSIONEER_OVERRIDE=${artiq.version}
export SOURCE_DATE_EPOCH=${builtins.toString self.sourceInfo.lastModified}
@ -412,7 +440,7 @@
};
};

inherit makeArtiqBoardPackage openocd-bscanspi-f;
inherit makeArtiqBoardPackage;

defaultPackage.x86_64-linux = pkgs.python3.withPackages(ps: [ packages.x86_64-linux.artiq ]);
@ -433,14 +461,14 @@
artiq-frontend-dev-wrappers
# To manually run compiler tests:
pkgs.lit
pkgs.outputcheck
outputcheck
libartiq-support
# use the vivado-env command to enter a FHS shell that lets you run the Vivado installer
packages.x86_64-linux.vivadoEnv
packages.x86_64-linux.vivado
packages.x86_64-linux.openocd-bscanspi
pkgs.python3Packages.sphinx pkgs.python3Packages.sphinx_rtd_theme
pkgs.python3Packages.sphinx-argparse pkgs.python3Packages.sphinxcontrib-wavedrom latex-artiq-manual
pkgs.python3Packages.sphinx-argparse sphinxcontrib-wavedrom latex-artiq-manual
];
shellHook = ''
export LIBARTIQ_SUPPORT=`libartiq-support`