Compare commits


166 Commits

Author SHA1 Message Date
b5e762854b moninj: DAC horizontal layout 2024-04-17 15:00:26 +08:00
d6c396fc69 moninj: DDS change to vert layout 2024-04-17 15:00:26 +08:00
5ad4199668 moninj: change to vboxlayout 2024-04-17 13:42:22 +08:00
2d8a3d14a6 moninj: TTL widgets horizontal layout 2024-04-17 13:42:22 +08:00
88ec806879 moninj: add to_model_path 2024-04-16 17:35:46 +08:00
0c8511eab6 moninj: merge moninj docks 2024-04-16 16:50:54 +08:00
13bf72574b moninj: dummy _AddChannelDialog 2024-04-16 16:20:20 +08:00
d1decc9198 moninj: add channel dummy button 2024-04-16 14:26:03 +08:00
2a16b84df9 moninj: add LayoutWidget 2024-04-16 14:14:44 +08:00
b4fb9b9bc6 moninj: remove unneccesary callbacks 2024-04-16 13:48:45 +08:00
aeaa337b1a moninj: handler create, delete widgets 2024-04-16 13:10:24 +08:00
3b8f6edd52 moninj: merge _DDSModel and _DDSHandler 2024-04-16 13:04:52 +08:00
585d8a4d5b moninj: decouple DAC logic and display 2024-04-15 17:39:07 +08:00
6408ae45e0 moninj: decouple DDS logic and display 2024-04-15 17:31:42 +08:00
6224270396 moninj: decouple TTL logic and display 2024-04-15 17:23:47 +08:00
6f00a4ac55 moninj: create dummy handler classes 2024-04-15 17:23:47 +08:00
1c506973a3 moninj: rename widget to handler 2024-04-15 16:18:52 +08:00
acd7ea3efb moninj: refactor _DACWidget 2024-04-15 16:04:32 +08:00
Norman Krackow baa58343ac urukul: fix tune_sync_delay() (#2374) 2024-04-12 09:03:17 +08:00
1bcbee988d update copyright year 2024-04-11 16:35:44 +08:00
ab206ac154 worker: import host_only from the right place 2024-04-11 16:34:08 +08:00
4a2352c2df browser: disable quickstyle 2024-04-11 16:06:33 +08:00
f9a447e8e0 entries: fix EnumerationEntry disable_scroll_wheel 2024-04-11 16:05:39 +08:00
c4892cf285 shortcuts: style (NFC) 2024-04-11 10:33:03 +08:00
c1e6ae2193 schedule: style (NFC) 2024-04-11 10:33:03 +08:00
4f302ee675 explorer: style (NFC) 2024-04-11 10:33:03 +08:00
3ecd115252 experiments: style (NFC) 2024-04-11 10:33:03 +08:00
400c1644b0 datasets: style (NFC) 2024-04-11 10:33:03 +08:00
1b2a18c9c8 applets_ccb: style (NFC) 2024-04-11 10:33:03 +08:00
7d9199a2ee artiq_dashboard: style (NFC) 2024-04-11 10:33:03 +08:00
43edffc67e waveform: clean up imports 2024-04-10 12:05:32 +08:00
49930a2df2 datasets: clean imports 2024-04-10 12:05:32 +08:00
9d3509d7b0 shortcuts: clean imports 2024-04-10 12:05:32 +08:00
b555f08ed8 artiq_dashboard: clean imports 2024-04-10 12:05:32 +08:00
65005ed45a moninj: flake8 style fixes (NFC) 2024-04-10 11:10:52 +08:00
6ac532a00e moninj: clean up imports 2024-04-10 10:56:28 +08:00
856e43fd61 interactive_args: add default message 2024-04-08 16:54:37 +08:00
af11dc6b74 interactive_args: use bottom_item for supply, cancel 2024-04-05 18:53:06 +08:00
0fb31ddbb1 flake: update dependencies 2024-04-02 17:01:19 +08:00
9bf5695ab2 interactive_args: add quickstyle 2024-04-02 16:31:42 +08:00
5f49e582c8 master: fix race condition in interactive args supply
Closes #2375
2024-04-02 16:10:02 +08:00
fddff13842 docs: mock interactive_args 2024-04-02 15:47:48 +08:00
915d3613f1 artiq_dashboard: add InteractiveArgsDock 2024-04-02 15:47:48 +08:00
d463ccb218 interactive_args: add InteractiveArgsDock 2024-04-02 15:47:48 +08:00
b4d070fa1b docs: add quickstyle param 2024-04-02 15:46:57 +08:00
9934c756b2 RELEASE_NOTES: quickstyle EnumerationValue 2024-04-02 15:46:57 +08:00
47716badef add quickstyle option to EnumerationValue 2024-04-02 15:46:57 +08:00
8e68501081 applets: EntryArea return processed values 2024-04-02 15:45:38 +08:00
19b652d4c0 fix interactive args cancellation 2024-04-02 15:45:12 +08:00
dc0b803b19 use nixpkgs sphinxcontrib-wavedrom
Signed-off-by: Florian Agbuya <fa@m-labs.ph>
2024-03-27 18:02:49 +08:00
bc8bc952d7 use nixpkgs outputcheck
Signed-off-by: Florian Agbuya <fa@m-labs.ph>
2024-03-27 18:02:13 +08:00
aea5f04d74 dashboard, browser: fix missing recompute arguments 2024-03-26 16:46:53 +08:00
d0f893c01c flake: export openocd-bscanspi-f 2024-03-26 15:57:44 +08:00
7fa770fba9 artiq_client: cancel interactive arguments 2024-03-26 15:36:23 +08:00
5a8bc17e4d example: expand interactive 2024-03-26 15:35:58 +08:00
329e7189cc example: add interactive args 2024-03-25 14:55:17 +08:00
13a36bf911 browser, dashboard: fix restore scrollbar state 2024-03-20 10:39:35 +08:00
88438e2d76 browser: subclass _ArgumentEditor as EntryTreeWidget 2024-03-19 18:59:21 +08:00
1a41b16fb6 dashboard: subclass _ArgumentEditor as EntryTreeWidget 2024-03-19 18:59:21 +08:00
6978101b1f applets: subclass EntryArea as EntryTreeWidget 2024-03-19 18:59:21 +08:00
244c73a592 entries: add EntryTreeWidget 2024-03-19 18:59:21 +08:00
c4323e1179 interactive args: add title param 2024-03-13 12:13:55 +08:00
609684664a coredevice schema: add enable_wrpll option to json 2024-03-11 16:42:20 +08:00
7e6ed1655f artiq_client: fix deprecated wait usage 2024-03-11 13:10:10 +08:00
332c9c0fcd waveform: consistent log messages 2024-03-07 12:11:28 +08:00
27178c1478 moninj: remove CancelledError workaround 2024-03-07 12:10:05 +08:00
e56331248e dashboard: fix device subscriber connections 2024-03-06 18:09:12 +08:00
692572a3b9 style (NFC) 2024-02-28 12:48:31 +08:00
18f55bb196 master: fix asyncio exception handling
Follow Python 3.8.
2024-02-28 12:48:11 +08:00
3e8a853e53 artiq_client: implement interactive arguments 2024-02-28 11:51:30 +08:00
de29db0b35 master: implement interactive arguments
Interaction with experiment termination (forceful and requested) still
needs some work.
2024-02-28 11:49:33 +08:00
42d3c3b4b2 session: workaround for stream.close interrupted 2024-02-27 18:06:58 +08:00
450fe91e93 artiq_client: handle Ctrl-C gracefully 2024-02-27 15:46:23 +08:00
002325be17 applets: rename params 2024-02-27 15:26:37 +08:00
92eb3947a4 master: shorten RPC target names 2024-02-27 15:24:43 +08:00
3609f95207 flake: add new lmdb mock module for artiq-manual
Signed-off-by: Florian Agbuya <fa@m-labs.ph>
2024-02-27 12:00:19 +08:00
5e01661443 master.databases: style (NFC) 2024-02-27 11:25:56 +08:00
a21805598a dashboard fix moninj, analyzer clients 2024-02-27 11:25:43 +08:00
c151f0c3ce waveform: remove unused setTimescale 2024-02-27 11:24:33 +08:00
c794e51c1c waveform: fix log msg display 2024-02-27 11:24:07 +08:00
bafa69098a style/doc fixes (NFC) 2024-02-27 11:10:32 +08:00
b2ba087acd dashboard: do not use float64. Closes #2347 2024-02-27 11:09:37 +08:00
a8a5fc213b worker_impl: style fixes (NFC) 2024-02-27 10:39:37 +08:00
7688f380b1 environment, artiq_run: introduce interactive arguments 2024-02-26 19:30:31 +08:00
a0450555e2 RELEASE_NOTES: units in datasets 2024-02-26 19:29:31 +08:00
b142428607 doc: remove incorrect and unneeded code comment 2024-02-26 17:26:23 +08:00
750fdf89b3 doc: get rid of confusing 'Extensions' wording 2024-02-26 17:18:28 +08:00
0a24d72b9f dashboard: change analyzer proxy client 2024-02-26 16:55:55 +08:00
7c1274f254 RELEASE_NOTES: Python 3.10 -> 3.11 2024-02-26 16:30:22 +08:00
716d0f556d grabber: timeout fixes 2024-02-26 11:03:59 +08:00
Charles Baynham 20d7604f87 grabber: Add optional timeout for getting grabber data
Signed-off-by: Charles Baynham <c.baynham@imperial.ac.uk>
2024-02-26 11:02:02 +08:00
4c142ec3f1 waveform: add reset zoom button 2024-02-23 10:56:37 +08:00
c49600a2fc docs: fix sampler, waveform 2024-02-23 10:56:19 +08:00
cda758ef53 docs: fix waveform imports 2024-02-22 17:04:37 +08:00
bd9e8b3977 waveform: simplify AddChannelDialog 2024-02-20 16:30:26 +08:00
779b7704ed waveform, comm_analyzer add cursor label unit 2024-02-20 15:53:01 +08:00
edd23977f8 waveform: delete all waveforms confirm dialog 2024-02-19 17:37:08 +08:00
f460af3a6a waveform: remove vertical grids 2024-02-19 17:37:08 +08:00
1b0fd2e2d3 comm_analyzer: remove interval, timestamp 2024-02-19 17:37:08 +08:00
652bcc22c6 waveform: remove empty waveform error msg 2024-02-19 17:37:08 +08:00
de539a4d33 waveform: remove None cursor label 2024-02-19 17:37:08 +08:00
1749fa661f waveform: reset cursor onDataChange 2024-02-16 15:54:25 +08:00
6ed6fb0bce waveform: fix log messages 2024-02-16 15:24:39 +08:00
fc282d4e17 artiq_ddb_template: fix clk_div config
remove clk_div default in jsonschema
set CLK IN divided by 1 as default when bypassing PLL
2024-02-16 15:23:55 +08:00
795b8ae4c6 add analyzer proxy magic 2024-02-16 14:48:49 +08:00
21b77567f2 waveform: add timeout to reconnect_cr 2024-02-16 14:48:49 +08:00
d085c1e4a4 waveform, analyzer proxy fix connect errors 2024-02-16 14:48:49 +08:00
720cbb4490 comm_analyzer, waveform add ndecimals 2024-02-15 15:46:12 +08:00
efb8aaf9f9 comm_analyzer: fix stopped_x 2024-02-15 15:32:00 +08:00
7c583b9c04 flake: update dependencies 2024-02-09 14:04:16 +08:00
7f43c5c31a waveform: add cursor 2024-02-08 12:59:22 +08:00
40cea30285 waveform: add open, save channel list 2024-02-02 17:23:52 +08:00
8b503c3b4f waveform: add remove, clear waveform actions 2024-02-02 11:18:03 +08:00
1e9070a2af testing: add coreanalyzer_proxy smoke test 2024-02-02 10:21:22 +08:00
dcf1bba8c6 waveform: implement _create_waveform 2024-02-01 19:02:09 +08:00
a7b045a478 waveform: misc fixes 2024-02-01 19:02:09 +08:00
3aaa7e04f2 flake: update dependencies 2024-02-01 18:58:27 +08:00
b648a2930b docs: elaborate on subkernel message names 2024-02-01 18:56:07 +08:00
b64c75fd71 subkernel: warn on kernel finish w/ pending msgs 2024-02-01 18:56:07 +08:00
392533f8ee subkernel lit tests: fix timeouts to no-timeouts 2024-02-01 18:56:07 +08:00
7fee68ede0 subkernel messages: check for send/recv pairs 2024-02-01 18:56:07 +08:00
849b77fbf2 compiler: fix send_message after stackrestore 2024-02-01 18:56:07 +08:00
502204cab2 subkernel: fix DMA return control to wrong master 2024-02-01 18:56:07 +08:00
d1ee0ffb83 subkernel: fix passing arguments 2024-02-01 18:56:07 +08:00
cbe7ac1cfd waveform: add AnalogWaveform 2024-01-31 17:26:22 +08:00
2d8de3ed93 waveform: add BitVectorWaveform 2024-01-31 16:59:12 +08:00
5f3126f393 waveform: add BitWaveform 2024-01-31 15:51:34 +08:00
09462442f7 subkernel: allow negative timeouts for no timeout 2024-01-31 11:46:07 +08:00
726cb092ca tests: add message passing tests 2024-01-31 11:46:07 +08:00
fbbc8d3dd1 docs: add a section for subkernel message passing 2024-01-31 11:46:07 +08:00
0ba0330b53 compiler: support free subkernel message passing 2024-01-31 11:46:07 +08:00
7d3bcc7cac satman: support free subkernel message passing 2024-01-31 11:46:07 +08:00
171c7a6e11 runtime: use the destination passed by kernel 2024-01-31 11:46:07 +08:00
c087a47e45 waveform: add _BaseWaveform 2024-01-31 11:41:35 +08:00
28dfe1f9c6 waveform: add _WaveformView 2024-01-30 09:45:29 +08:00
3861d58749 dndwidgets: change splitter to use signal 2024-01-30 09:45:29 +08:00
6c9f1cbf7c waveform: add save_vcd 2024-01-25 15:06:21 +08:00
06b908fd18 waveform: fix in _AddChannelDialog 2024-01-25 14:49:55 +08:00
e72f37eb4e waveform: add _WaveformModel 2024-01-25 14:49:55 +08:00
847b4ee2a3 waveform: add save_trace 2024-01-25 14:49:16 +08:00
863daca2da waveform: remove punctuation in log msgs 2024-01-25 14:01:25 +08:00
fcaf4a8af0 gui.tools: add get_save_file_name helper 2024-01-25 12:32:06 +08:00
466d865e58 waveform: add _AddChannelDialog 2024-01-22 15:39:46 +08:00
5036230ff3 waveform: change log channel update 2024-01-22 15:39:11 +08:00
12a44fad3c comm_analyzer: change usage of logs field 2024-01-22 15:39:11 +08:00
096664c1ba dndwidgets: add drag drop helper widgets 2024-01-22 15:37:57 +08:00
8a9b6a449b artiq_dashboard: start proxy clients, device_sub 2024-01-22 15:37:10 +08:00
73be2257d3 waveform: add proxy clients 2024-01-22 15:37:10 +08:00
9088ffa2ca artiq_dashboard: add WaveformDock 2024-01-22 13:59:31 +08:00
d44f55c6d9 waveform: add WaveformDock 2024-01-22 13:59:31 +08:00
e393b3ab37 comm_analyzer: add set_end_time call 2024-01-15 13:39:06 +08:00
3af4c9d517 comm_analyzer: add get_channel_list 2024-01-13 00:07:42 +08:00
64567bc26f comm_analyzer: add AnalyzerProxyReceiver 2024-01-13 00:06:14 +08:00
da15e94c22 flake: update dependencies 2024-01-11 12:30:37 +08:00
Charles Baynham 669edf17c5 scheduler: resolve git references into revisions on submission (#2296)
Signed-off-by: Charles Baynham <c.baynham@imperial.ac.uk>
2024-01-10 16:05:26 +08:00
b215df2d25 comm_analyzer: add WaveformManager, WaveformChannel 2024-01-10 16:02:04 +08:00
6c0ff9a912 satman: fix targets without drtio routing 2024-01-09 10:41:22 +08:00
c9e3771cd5 subkernels: add support for (d)dma 2024-01-09 08:44:45 +08:00
c876acd5a5 docs: subkernels can call other subkernels now 2024-01-09 08:44:45 +08:00
4363cdf9fa master: make use of the async message ready flag 2024-01-09 08:44:45 +08:00
95b92a178b satman: make use of the async flag 2024-01-09 08:44:45 +08:00
1cc7398bc0 drtio: add sat -> mst async notif packet 2024-01-09 08:44:45 +08:00
4956fac861 satman: allow subkernels start subkernels 2024-01-09 08:44:45 +08:00
9bc66e5c14 support routing packets between satellites and master 2024-01-09 08:44:45 +08:00
4495f6035e master: support source parameters 2024-01-09 08:44:45 +08:00
e556c29b40 drtioaux: add source to relevant drtio packets 2024-01-09 08:44:45 +08:00
83 changed files with 4382 additions and 1590 deletions

View File

@ -29,7 +29,7 @@ Website: https://m-labs.hk/artiq
License
=======
Copyright (C) 2014-2023 M-Labs Limited.
Copyright (C) 2014-2024 M-Labs Limited.
ARTIQ is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by

View File

@ -39,12 +39,19 @@ Highlights:
- Hotkeys now organize experiment windows in the order they were last interacted with:
+ CTRL+SHIFT+T tiles experiment windows
+ CTRL+SHIFT+C cascades experiment windows
- By enabling the ``quickstyle`` option, ``EnumerationValue`` entry widgets can now alternatively display
their choices as buttons that submit the experiment on click.
* Datasets can now be associated with units and scale factors, and displayed accordingly in the dashboard
including applets, like widgets such as ``NumberValue`` already did in earlier ARTIQ versions.
* Experiments can now request arguments interactively from the user at any time.
* Persistent datasets are now stored in a LMDB database for improved performance.
* Python's built-in types (such as ``float``, or ``List[...]``) can now be used in type annotations on
kernel functions.
* Full Python 3.10 support.
* Full Python 3.11 support.
* MSYS2 packaging for Windows, which replaces Conda. Conda packages are still available to
support legacy installations, but may be removed in a future release.
* Experiments can now be submitted with revisions set to a branch / tag name instead of only git hashes.
* Grabber image input now has an optional timeout.
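For illustration, an ``EnumerationValue`` using the new ``quickstyle`` option might be declared as below. This is a minimal sketch: the experiment, argument and choice names are invented, and it assumes ``quickstyle`` is accepted as a constructor keyword, as the dashboard's ``procdesc["quickstyle"]`` handling in this changeset suggests.

```python
from artiq.experiment import EnvExperiment, EnumerationValue


class QuickstyleDemo(EnvExperiment):
    """Hypothetical experiment illustrating the quickstyle option."""
    def build(self):
        # With quickstyle=True the dashboard shows the choices as buttons;
        # clicking one submits the experiment with that choice selected.
        self.setattr_argument("mode", EnumerationValue(
            ["calibrate", "scan", "idle"], quickstyle=True))

    def run(self):
        print(self.mode)
```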
Breaking changes:

View File

@ -42,13 +42,13 @@ class _AppletRequestInterface:
"""
raise NotImplementedError
def set_argument_value(self, expurl, name, value):
def set_argument_value(self, expurl, key, value):
"""
Temporarily set the value of an argument in an experiment in the dashboard.
The value resets to the default value when the argument is recomputed.
:param expurl: Experiment URL identifying the experiment in the dashboard. Example: 'repo:ArgumentsDemo'.
:param name: Name of the argument in the experiment.
:param key: Name of the argument in the experiment.
:param value: Object representing the new temporary value of the argument. For ``Scannable`` arguments, this parameter
should be a ``ScanObject``. The type of the ``ScanObject`` will be set as the selected type when this function is called.
"""
@ -77,10 +77,10 @@ class AppletRequestIPC(_AppletRequestInterface):
mod = {"action": "append", "path": [key, 1], "x": value}
self.ipc.update_dataset(mod)
def set_argument_value(self, expurl, name, value):
def set_argument_value(self, expurl, key, value):
if isinstance(value, ScanObject):
value = value.describe()
self.ipc.set_argument_value(expurl, name, value)
self.ipc.set_argument_value(expurl, key, value)
class AppletRequestRPC(_AppletRequestInterface):
@ -182,10 +182,10 @@ class AppletIPCClient(AsyncioChildComm):
self.write_pyon({"action": "update_dataset",
"mod": mod})
def set_argument_value(self, expurl, name, value):
def set_argument_value(self, expurl, key, value):
self.write_pyon({"action": "set_argument_value",
"expurl": expurl,
"name": name,
"key": key,
"value": value})
@ -255,7 +255,7 @@ class SimpleApplet:
if self.embed is None:
dataset_ctl = RPCClient()
self.loop.run_until_complete(dataset_ctl.connect_rpc(
self.args.server, self.args.port_control, "master_dataset_db"))
self.args.server, self.args.port_control, "dataset_db"))
self.req = AppletRequestRPC(self.loop, dataset_ctl)
else:
self.req = AppletRequestIPC(self.ipc)
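For context, ``set_argument_value(expurl, key, value)`` is the request an applet uses to push a temporary value back into a dashboard experiment window; the change above only renames the parameter. A hedged sketch (the experiment URL and argument key are invented):

```python
def push_argument(req, expurl="repo:ArgumentsDemo", key="delay", value=10.0):
    # req is the applet request interface shown above: AppletRequestIPC when
    # the applet is embedded in the dashboard, AppletRequestRPC otherwise.
    # The temporary value is discarded the next time the argument is recomputed.
    req.set_argument_value(expurl, key, value)
```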

View File

@ -33,7 +33,7 @@ class DatasetCtl:
try:
remote = RPCClient()
await remote.connect_rpc(self.master_host, self.master_port,
"master_dataset_db")
"dataset_db")
try:
if op_name == "set":
await remote.set(key_or_mod, value, persist, metadata)

View File

@ -10,87 +10,26 @@ import h5py
from sipyco import pyon
from artiq import __artiq_dir__ as artiq_dir
from artiq.gui.tools import (LayoutWidget, WheelFilter,
log_level_to_name, get_open_file_name)
from artiq.gui.entries import procdesc_to_entry
from artiq.gui.tools import (LayoutWidget, log_level_to_name, get_open_file_name)
from artiq.gui.entries import procdesc_to_entry, EntryTreeWidget
from artiq.master.worker import Worker, log_worker_exception
logger = logging.getLogger(__name__)
class _ArgumentEditor(QtWidgets.QTreeWidget):
class _ArgumentEditor(EntryTreeWidget):
def __init__(self, dock):
QtWidgets.QTreeWidget.__init__(self)
self.setColumnCount(3)
self.header().setStretchLastSection(False)
try:
set_resize_mode = self.header().setSectionResizeMode
except AttributeError:
set_resize_mode = self.header().setResizeMode
set_resize_mode(0, QtWidgets.QHeaderView.ResizeToContents)
set_resize_mode(1, QtWidgets.QHeaderView.Stretch)
set_resize_mode(2, QtWidgets.QHeaderView.ResizeToContents)
self.header().setVisible(False)
self.setSelectionMode(self.NoSelection)
self.setHorizontalScrollMode(self.ScrollPerPixel)
self.setVerticalScrollMode(self.ScrollPerPixel)
self.setStyleSheet("QTreeWidget {background: " +
self.palette().midlight().color().name() + " ;}")
self.viewport().installEventFilter(WheelFilter(self.viewport(), True))
self._groups = dict()
self._arg_to_widgets = dict()
EntryTreeWidget.__init__(self)
self._dock = dock
if not self._dock.arguments:
self.addTopLevelItem(QtWidgets.QTreeWidgetItem(["No arguments"]))
gradient = QtGui.QLinearGradient(
0, 0, 0, QtGui.QFontMetrics(self.font()).lineSpacing()*2.5)
gradient.setColorAt(0, self.palette().base().color())
gradient.setColorAt(1, self.palette().midlight().color())
self.insertTopLevelItem(0, QtWidgets.QTreeWidgetItem(["No arguments"]))
for name, argument in self._dock.arguments.items():
widgets = dict()
self._arg_to_widgets[name] = widgets
self.set_argument(name, argument)
entry = procdesc_to_entry(argument["desc"])(argument)
widget_item = QtWidgets.QTreeWidgetItem([name])
if argument["tooltip"]:
widget_item.setToolTip(0, argument["tooltip"])
widgets["entry"] = entry
widgets["widget_item"] = widget_item
self.quickStyleClicked.connect(self._dock._run_clicked)
for col in range(3):
widget_item.setBackground(col, gradient)
font = widget_item.font(0)
font.setBold(True)
widget_item.setFont(0, font)
if argument["group"] is None:
self.addTopLevelItem(widget_item)
else:
self._get_group(argument["group"]).addChild(widget_item)
fix_layout = LayoutWidget()
widgets["fix_layout"] = fix_layout
fix_layout.addWidget(entry)
self.setItemWidget(widget_item, 1, fix_layout)
recompute_argument = QtWidgets.QToolButton()
recompute_argument.setToolTip("Re-run the experiment's build "
"method and take the default value")
recompute_argument.setIcon(
QtWidgets.QApplication.style().standardIcon(
QtWidgets.QStyle.SP_BrowserReload))
recompute_argument.clicked.connect(
partial(self._recompute_argument_clicked, name))
fix_layout = LayoutWidget()
fix_layout.addWidget(recompute_argument)
self.setItemWidget(widget_item, 2, fix_layout)
widget_item = QtWidgets.QTreeWidgetItem()
self.addTopLevelItem(widget_item)
recompute_arguments = QtWidgets.QPushButton("Recompute all arguments")
recompute_arguments.setIcon(
QtWidgets.QApplication.style().standardIcon(
@ -108,21 +47,7 @@ class _ArgumentEditor(QtWidgets.QTreeWidget):
buttons.addWidget(load, 1, 2)
for i, s in enumerate((1, 0, 0, 1)):
buttons.layout.setColumnStretch(i, s)
self.setItemWidget(widget_item, 1, buttons)
def _get_group(self, name):
if name in self._groups:
return self._groups[name]
group = QtWidgets.QTreeWidgetItem([name])
for col in range(3):
group.setBackground(col, self.palette().mid())
group.setForeground(col, self.palette().brightText())
font = group.font(col)
font.setBold(True)
group.setFont(col, font)
self.addTopLevelItem(group)
self._groups[name] = group
return group
self.setItemWidget(self.bottom_item, 1, buttons)
def _load_clicked(self):
asyncio.ensure_future(self._dock.load_hdf5_task())
@ -130,8 +55,8 @@ class _ArgumentEditor(QtWidgets.QTreeWidget):
def _recompute_arguments_clicked(self):
asyncio.ensure_future(self._dock._recompute_arguments())
def _recompute_argument_clicked(self, name):
asyncio.ensure_future(self._recompute_argument(name))
def reset_entry(self, key):
asyncio.ensure_future(self._recompute_argument(key))
async def _recompute_argument(self, name):
try:
@ -146,29 +71,7 @@ class _ArgumentEditor(QtWidgets.QTreeWidget):
state = procdesc_to_entry(procdesc).default_state(procdesc)
argument["desc"] = procdesc
argument["state"] = state
widgets = self._arg_to_widgets[name]
widgets["entry"].deleteLater()
widgets["entry"] = procdesc_to_entry(procdesc)(argument)
widgets["fix_layout"] = LayoutWidget()
widgets["fix_layout"].addWidget(widgets["entry"])
self.setItemWidget(widgets["widget_item"], 1, widgets["fix_layout"])
self.updateGeometries()
def save_state(self):
expanded = []
for k, v in self._groups.items():
if v.isExpanded():
expanded.append(k)
return {"expanded": expanded}
def restore_state(self, state):
for e in state["expanded"]:
try:
self._groups[e].setExpanded(True)
except KeyError:
pass
self.update_argument(name, argument)
log_levels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]
@ -277,8 +180,8 @@ class _ExperimentDock(QtWidgets.QMdiSubWindow):
state = self.argeditor.save_state()
self.argeditor.deleteLater()
self.argeditor = _ArgumentEditor(self)
self.argeditor.restore_state(state)
self.layout.addWidget(self.argeditor, 0, 0, 1, 5)
self.argeditor.restore_state(state)
async def load_hdf5_task(self, filename=None):
if filename is None:
@ -466,6 +369,8 @@ class ExperimentsArea(QtWidgets.QMdiArea):
def initialize_submission_arguments(self, arginfo):
arguments = OrderedDict()
for name, (procdesc, group, tooltip) in arginfo.items():
if procdesc["ty"] == "EnumerationValue" and procdesc["quickstyle"]:
procdesc["quickstyle"] = False
state = procdesc_to_entry(procdesc).default_state(procdesc)
arguments[name] = {
"desc": procdesc,

View File

@ -253,6 +253,12 @@ def fn_subkernel_await():
def fn_subkernel_preload():
return types.TBuiltinFunction("subkernel_preload")
def fn_subkernel_send():
return types.TBuiltinFunction("subkernel_send")
def fn_subkernel_recv():
return types.TBuiltinFunction("subkernel_recv")
# Accessors
def is_none(typ):

View File

@ -47,8 +47,15 @@ class SpecializedFunction:
return hash((self.instance_type, self.host_function))
class SubkernelMessageType:
def __init__(self, name, value_type):
self.name = name
self.value_type = value_type
self.send_loc = None
self.recv_loc = None
class EmbeddingMap:
def __init__(self):
def __init__(self, old_embedding_map=None):
self.object_current_key = 0
self.object_forward_map = {}
self.object_reverse_map = {}
@ -65,6 +72,22 @@ class EmbeddingMap:
self.str_forward_map = {}
self.str_reverse_map = {}
# mapping `name` to object ID
self.subkernel_message_map = {}
# subkernels: dict of ID: function, just like object_forward_map
# allow the embedding map to be aware of subkernels from other kernels
if not old_embedding_map is None:
for key, obj_ref in old_embedding_map.subkernels().items():
self.object_forward_map[key] = obj_ref
obj_id = id(obj_ref)
self.object_reverse_map[obj_id] = key
for msg_id, msg_type in old_embedding_map.subkernel_messages().items():
self.object_forward_map[msg_id] = msg_type
obj_id = id(msg_type)
self.subkernel_message_map[msg_type.name] = msg_id
self.object_reverse_map[obj_id] = msg_id
self.preallocate_runtime_exception_names(["RuntimeError",
"RTIOUnderflow",
"RTIOOverflow",
@ -165,6 +188,11 @@ class EmbeddingMap:
return self.object_reverse_map[obj_id]
self.object_current_key += 1
while self.object_forward_map.get(self.object_current_key):
# make sure there are no collisions with previously inserted subkernels
# their identifiers must be consistent across all kernels/subkernels
self.object_current_key += 1
self.object_forward_map[self.object_current_key] = obj_ref
self.object_reverse_map[obj_id] = self.object_current_key
return self.object_current_key
@ -177,7 +205,7 @@ class EmbeddingMap:
obj_ref = self.object_forward_map[obj_id]
if isinstance(obj_ref, (pytypes.FunctionType, pytypes.MethodType,
pytypes.BuiltinFunctionType, pytypes.ModuleType,
SpecializedFunction)):
SpecializedFunction, SubkernelMessageType)):
continue
elif isinstance(obj_ref, type):
_, obj_typ = self.type_map[obj_ref]
@ -193,6 +221,35 @@ class EmbeddingMap:
subkernels[k] = v
return subkernels
def store_subkernel_message(self, name, value_type, function_type, function_loc):
if name in self.subkernel_message_map:
msg_id = self.subkernel_message_map[name]
else:
msg_id = self.store_object(SubkernelMessageType(name, value_type))
self.subkernel_message_map[name] = msg_id
subkernel_msg = self.retrieve_object(msg_id)
if function_type == "send":
subkernel_msg.send_loc = function_loc
elif function_type == "recv":
subkernel_msg.recv_loc = function_loc
else:
assert False
return msg_id, subkernel_msg
def subkernel_messages(self):
messages = {}
for msg_id in self.subkernel_message_map.values():
messages[msg_id] = self.retrieve_object(msg_id)
return messages
def subkernel_messages_unpaired(self):
unpaired = []
for msg_id in self.subkernel_message_map.values():
msg_obj = self.retrieve_object(msg_id)
if msg_obj.send_loc is None or msg_obj.recv_loc is None:
unpaired.append(msg_obj)
return unpaired
def has_rpc(self):
return any(filter(
lambda x: (inspect.isfunction(x) or inspect.ismethod(x)) and \
@ -200,10 +257,6 @@ class EmbeddingMap:
self.object_forward_map.values()
))
def has_rpc_or_subkernel(self):
return any(filter(lambda x: inspect.isfunction(x) or inspect.ismethod(x),
self.object_forward_map.values()))
class ASTSynthesizer:
def __init__(self, embedding_map, value_map, quote_function=None, expanded_from=None):
@ -794,7 +847,7 @@ class TypedtreeHasher(algorithm.Visitor):
return hash(tuple(freeze(getattr(node, field_name)) for field_name in fields))
class Stitcher:
def __init__(self, core, dmgr, engine=None, print_as_rpc=True, destination=0, subkernel_arg_types=[]):
def __init__(self, core, dmgr, engine=None, print_as_rpc=True, destination=0, subkernel_arg_types=[], old_embedding_map=None):
self.core = core
self.dmgr = dmgr
if engine is None:
@ -816,7 +869,7 @@ class Stitcher:
self.functions = {}
self.embedding_map = EmbeddingMap()
self.embedding_map = EmbeddingMap(old_embedding_map)
self.value_map = defaultdict(lambda: [])
self.definitely_changed = False
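To make the new bookkeeping concrete: each message name is registered once per ``send``/``recv`` site, reuses the same ID on later registrations, and anything left with only one side shows up in ``subkernel_messages_unpaired()``. A hedged host-side sketch (the compiler normally drives this with real source locations rather than ``None``):

```python
from artiq.compiler import builtins
from artiq.compiler.embedding import EmbeddingMap

emap = EmbeddingMap()

# Both ends of "data" register under the same message ID.
msg_id, _ = emap.store_subkernel_message("data", builtins.TInt32(), "send", None)
same_id, _ = emap.store_subkernel_message("data", builtins.TInt32(), "recv", None)
assert msg_id == same_id

# "orphan" only ever has a sender, so it is reported as unpaired.
emap.store_subkernel_message("orphan", builtins.TInt32(), "send", None)
print([m.name for m in emap.subkernel_messages_unpaired()])   # ['orphan']
```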

View File

@ -59,4 +59,6 @@ def globals():
# ARTIQ subkernel utility functions
"subkernel_await": builtins.fn_subkernel_await(),
"subkernel_preload": builtins.fn_subkernel_preload(),
"subkernel_send": builtins.fn_subkernel_send(),
"subkernel_recv": builtins.fn_subkernel_recv(),
}

View File

@ -2537,7 +2537,7 @@ class ARTIQIRGenerator(algorithm.Visitor):
timeout = self.visit(node.args[1])
elif len(node.args) == 1 and len(node.keywords) == 0:
fn = node.args[0].type
timeout = ir.Constant(10_000, builtins.TInt64())
timeout = ir.Constant(-1, builtins.TInt64())
else:
assert False
if types.is_method(fn):
@ -2557,7 +2557,44 @@ class ARTIQIRGenerator(algorithm.Visitor):
if types.is_method(fn):
fn = types.get_method_function(fn)
sid = ir.Constant(fn.sid, builtins.TInt32())
return self.append(ir.Builtin("subkernel_preload", [sid], builtins.TNone()))
dest = ir.Constant(fn.destination, builtins.TInt32())
return self.append(ir.Builtin("subkernel_preload", [sid, dest], builtins.TNone()))
elif types.is_builtin(typ, "subkernel_send"):
if len(node.args) == 3 and len(node.keywords) == 0:
dest = self.visit(node.args[0])
name = node.args[1].s
value = self.visit(node.args[2])
else:
assert False
msg_id, msg = self.embedding_map.store_subkernel_message(name, value.type, "send", node.loc)
msg_id = ir.Constant(msg_id, builtins.TInt32())
if value.type != msg.value_type:
diag = diagnostic.Diagnostic("error",
"type mismatch for subkernel message '{name}', receiver expects {recv} while sending {send}",
{"name": name, "recv": msg.value_type, "send": value.type},
node.loc)
self.engine.process(diag)
return self.append(ir.Builtin("subkernel_send", [msg_id, dest, value], builtins.TNone()))
elif types.is_builtin(typ, "subkernel_recv"):
if len(node.args) == 2 and len(node.keywords) == 0:
name = node.args[0].s
vartype = node.args[1].value
timeout = ir.Constant(-1, builtins.TInt64())
elif len(node.args) == 3 and len(node.keywords) == 0:
name = node.args[0].s
vartype = node.args[1].value
timeout = self.visit(node.args[2])
else:
assert False
msg_id, msg = self.embedding_map.store_subkernel_message(name, vartype, "recv", node.loc)
msg_id = ir.Constant(msg_id, builtins.TInt32())
if vartype != msg.value_type:
diag = diagnostic.Diagnostic("error",
"type mismatch for subkernel message '{name}', receiver expects {recv} while sending {send}",
{"name": name, "recv": vartype, "send": msg.value_type},
node.loc)
self.engine.process(diag)
return self.append(ir.Builtin("subkernel_recv", [msg_id, timeout], vartype))
elif types.is_exn_constructor(typ):
return self.alloc_exn(node.type, *[self.visit(arg_node) for arg_node in node.args])
elif types.is_constructor(typ):

View File

@ -1343,6 +1343,57 @@ class Inferencer(algorithm.Visitor):
node.loc, None)
else:
diagnose(valid_forms())
elif types.is_builtin(typ, "subkernel_send"):
valid_forms = lambda: [
valid_form("subkernel_send(dest: numpy.int?, name: str, value: V) -> None"),
]
self._unify(node.type, builtins.TNone(),
node.loc, None)
if len(node.args) == 3:
arg0 = node.args[0]
if types.is_var(arg0.type):
pass # undetermined yet
else:
if builtins.is_int(arg0.type):
self._unify(arg0.type, builtins.TInt8(),
arg0.loc, None)
else:
diagnose(valid_forms())
arg1 = node.args[1]
self._unify(arg1.type, builtins.TStr(),
arg1.loc, None)
else:
diagnose(valid_forms())
elif types.is_builtin(typ, "subkernel_recv"):
valid_forms = lambda: [
valid_form("subkernel_recv(name: str, value_type: type) -> value_type"),
valid_form("subkernel_recv(name: str, value_type: type, timeout: numpy.int64) -> value_type"),
]
if 2 <= len(node.args) <= 3:
arg0 = node.args[0]
if types.is_var(arg0.type):
pass
else:
self._unify(arg0.type, builtins.TStr(),
arg0.loc, None)
arg1 = node.args[1]
if types.is_var(arg1.type):
pass
else:
self._unify(node.type, arg1.value,
node.loc, None)
if len(node.args) == 3:
arg2 = node.args[2]
if types.is_var(arg2.type):
pass
elif builtins.is_int(arg2.type):
# promote to TInt64
self._unify(arg2.type, builtins.TInt64(),
arg2.loc, None)
else:
diagnose(valid_forms())
else:
diagnose(valid_forms())
else:
assert False
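Taken together, the builtin forms checked here correspond to kernel code roughly like the following. This is a hedged sketch: destination numbers, the message name and the ``@subkernel(destination=...)`` decorator usage are illustrative, and ``subkernel_send``/``subkernel_recv`` resolve through the compiler prelude added in this changeset rather than a Python import.

```python
from artiq.experiment import *


class MessagePassingDemo(EnvExperiment):
    """Hedged sketch of subkernel message passing."""
    def build(self):
        self.setattr_device("core")

    @subkernel(destination=1)
    def measure(self) -> TNone:
        result = 42                           # placeholder satellite-side work
        subkernel_send(0, "res", result)      # destination 0 is the master

    @kernel
    def run(self):
        self.core.reset()
        self.measure()                        # start the subkernel
        value = subkernel_recv("res", TInt32)     # blocks until "res" arrives
        # subkernel_recv("res", TInt32, timeout) is the timed variant
        print(value)
```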

View File

@ -399,9 +399,9 @@ class LLVMIRGenerator:
llty = ll.FunctionType(lli32, [llptr])
elif name == "subkernel_send_message":
llty = ll.FunctionType(llvoid, [lli32, lli8, llsliceptr, llptrptr])
llty = ll.FunctionType(llvoid, [lli32, lli1, lli8, lli8, llsliceptr, llptrptr])
elif name == "subkernel_load_run":
llty = ll.FunctionType(llvoid, [lli32, lli1])
llty = ll.FunctionType(llvoid, [lli32, lli8, lli1])
elif name == "subkernel_await_finish":
llty = ll.FunctionType(llvoid, [lli32, lli64])
elif name == "subkernel_await_message":
@ -1417,8 +1417,23 @@ class LLVMIRGenerator:
return self._build_rpc_recv(insn.type, llstackptr)
elif insn.op == "subkernel_preload":
llsid = self.map(insn.operands[0])
return self.llbuilder.call(self.llbuiltin("subkernel_load_run"), [llsid, ll.Constant(lli1, 0)],
lldest = ll.Constant(lli8, insn.operands[1].value)
return self.llbuilder.call(self.llbuiltin("subkernel_load_run"), [llsid, lldest, ll.Constant(lli1, 0)],
name="subkernel.preload")
elif insn.op == "subkernel_send":
llmsgid = self.map(insn.operands[0])
lldest = self.map(insn.operands[1])
return self._build_subkernel_message(llmsgid, lldest, [insn.operands[2]])
elif insn.op == "subkernel_recv":
llmsgid = self.map(insn.operands[0])
lltimeout = self.map(insn.operands[1])
lltagptr = self._build_subkernel_tags([insn.type])
self.llbuilder.call(self.llbuiltin("subkernel_await_message"),
[llmsgid, lltimeout, lltagptr, ll.Constant(lli8, 1), ll.Constant(lli8, 1)],
name="subkernel.await.message")
llstackptr = self.llbuilder.call(self.llbuiltin("llvm.stacksave"), [],
name="subkernel.arg.stack")
return self._build_rpc_recv(insn.type, llstackptr)
else:
assert False
@ -1427,7 +1442,7 @@ class LLVMIRGenerator:
llmax = self.map(insn.operands[1])
lltagptr = self._build_subkernel_tags(insn.arg_types)
return self.llbuilder.call(self.llbuiltin("subkernel_await_message"),
[ll.Constant(lli32, 0), ll.Constant(lli64, 10_000), lltagptr, llmin, llmax],
[ll.Constant(lli32, -1), ll.Constant(lli64, 10_000), lltagptr, llmin, llmax],
name="subkernel.await.args")
def process_Closure(self, insn):
@ -1579,11 +1594,8 @@ class LLVMIRGenerator:
self.llbuilder.branch(llnormalblock)
return llret
def _build_rpc(self, fun_loc, fun_type, args, llnormalblock, llunwindblock):
llservice = ll.Constant(lli32, fun_type.service)
def _build_arg_tag(self, args, call_type):
tag = b""
for arg in args:
def arg_error_handler(typ):
printer = types.TypePrinter()
@ -1592,12 +1604,18 @@ class LLVMIRGenerator:
{"type": printer.name(typ)},
arg.loc)
diag = diagnostic.Diagnostic("error",
"type {type} is not supported in remote procedure calls",
{"type": printer.name(arg.type)},
"type {type} is not supported in {call_type} calls",
{"type": printer.name(arg.type), "call_type": call_type},
arg.loc, notes=[note])
self.engine.process(diag)
tag += ir.rpc_tag(arg.type, arg_error_handler)
tag += b":"
return tag
def _build_rpc(self, fun_loc, fun_type, args, llnormalblock, llunwindblock):
llservice = ll.Constant(lli32, fun_type.service)
tag = self._build_arg_tag(args, call_type="remote procedure")
def ret_error_handler(typ):
printer = types.TypePrinter()
@ -1660,36 +1678,26 @@ class LLVMIRGenerator:
def _build_subkernel_call(self, fun_loc, fun_type, args):
llsid = ll.Constant(lli32, fun_type.sid)
tag = b""
for arg in args:
def arg_error_handler(typ):
printer = types.TypePrinter()
note = diagnostic.Diagnostic("note",
"value of type {type}",
{"type": printer.name(typ)},
arg.loc)
diag = diagnostic.Diagnostic("error",
"type {type} is not supported in subkernel calls",
{"type": printer.name(arg.type)},
arg.loc, notes=[note])
self.engine.process(diag)
tag += ir.rpc_tag(arg.type, arg_error_handler)
tag += b":"
lldest = ll.Constant(lli8, fun_type.destination)
# run the kernel first
self.llbuilder.call(self.llbuiltin("subkernel_load_run"), [llsid, ll.Constant(lli1, 1)])
self.llbuilder.call(self.llbuiltin("subkernel_load_run"), [llsid, lldest, ll.Constant(lli1, 1)])
if args:
# only send args if there's anything to send, 'self' is excluded
self._build_subkernel_message(llsid, lldest, args)
return llsid
def _build_subkernel_message(self, llid, lldest, args):
# args (or messages) are sent in the same vein as RPC
tag = self._build_arg_tag(args, call_type="subkernel")
# arg sent in the same vein as RPC
llstackptr = self.llbuilder.call(self.llbuiltin("llvm.stacksave"), [],
name="subkernel.stack")
lltag = self.llconst_of_const(ir.Constant(tag, builtins.TStr()))
lltagptr = self.llbuilder.alloca(lltag.type)
self.llbuilder.store(lltag, lltagptr)
if args:
# only send args if there's anything to send, 'self' is excluded
llargs = self.llbuilder.alloca(llptr, ll.Constant(lli32, len(args)),
name="subkernel.args")
for index, arg in enumerate(args):
@ -1708,11 +1716,10 @@ class LLVMIRGenerator:
llargcount = ll.Constant(lli8, len(args))
llisreturn = ll.Constant(lli1, False)
self.llbuilder.call(self.llbuiltin("subkernel_send_message"),
[llsid, llargcount, lltagptr, llargs])
self.llbuilder.call(self.llbuiltin("llvm.stackrestore"), [llstackptr])
return llsid
[llid, llisreturn, lldest, llargcount, lltagptr, llargs])
return self.llbuilder.call(self.llbuiltin("llvm.stackrestore"), [llstackptr])
def _build_subkernel_return(self, insn):
# builds a remote return.
@ -1746,10 +1753,12 @@ class LLVMIRGenerator:
llretslot = self.llbuilder.bitcast(llretslot, llptr)
self.llbuilder.store(llretslot, llrets)
llsid = ll.Constant(lli32, 0) # return goes back to master, sid is ignored
llsid = ll.Constant(lli32, 0) # return goes back to the caller, sid is ignored
lltagcount = ll.Constant(lli8, 1) # only one thing is returned
llisreturn = ll.Constant(lli1, True) # it's a return, so destination is ignored
lldest = ll.Constant(lli8, 0)
self.llbuilder.call(self.llbuiltin("subkernel_send_message"),
[llsid, lltagcount, lltagptr, llrets])
[llsid, llisreturn, lldest, lltagcount, lltagptr, llrets])
def process_Call(self, insn):
functiontyp = insn.target_function().type

View File

@ -999,7 +999,7 @@ class AD9910:
"""
if not self.cpld.sync_div:
raise ValueError("parent cpld does not drive SYNC")
search_span = 31
search_span = 13
# FIXME https://github.com/sinara-hw/Urukul/issues/16
# should both be 2-4 once kasli sync_in jitter is identified
min_window = 0

View File

@ -2,15 +2,22 @@ from operator import itemgetter
from collections import namedtuple
from itertools import count
from contextlib import contextmanager
from sipyco import keepalive
import asyncio
from enum import Enum
import struct
import logging
import socket
import math
logger = logging.getLogger(__name__)
DEFAULT_REF_PERIOD = 1e-9
ANALYZER_MAGIC = b"ARTIQ Analyzer Proxy\n"
class MessageType(Enum):
output = 0b00
input = 0b01
@ -34,6 +41,13 @@ class ExceptionType(Enum):
i_overflow = 0b100001
class WaveformType(Enum):
ANALOG = 0
BIT = 1
VECTOR = 2
LOG = 3
def get_analyzer_dump(host, port=1382):
sock = socket.create_connection((host, port))
try:
@ -104,6 +118,8 @@ def decode_dump(data):
(sent_bytes, total_byte_count,
error_occurred, log_channel, dds_onehot_sel) = parts
logger.debug("analyzer dump has length %d", sent_bytes)
expected_len = sent_bytes + 15
if expected_len != len(data):
raise ValueError("analyzer dump has incorrect length "
@ -115,15 +131,83 @@ def decode_dump(data):
if total_byte_count > sent_bytes:
logger.info("analyzer ring buffer has wrapped %d times",
total_byte_count//sent_bytes)
if sent_bytes == 0:
logger.warning("analyzer dump is empty")
position = 15
messages = []
for _ in range(sent_bytes//32):
messages.append(decode_message(data[position:position+32]))
position += 32
if len(messages) == 1 and isinstance(messages[0], StoppedMessage):
logger.warning("analyzer dump is empty aside from stop message")
return DecodedDump(log_channel, bool(dds_onehot_sel), messages)
# simplified from sipyco broadcast Receiver
class AnalyzerProxyReceiver:
def __init__(self, receive_cb, disconnect_cb=None):
self.receive_cb = receive_cb
self.disconnect_cb = disconnect_cb
async def connect(self, host, port):
self.reader, self.writer = \
await keepalive.async_open_connection(host, port)
try:
line = await self.reader.readline()
assert line == ANALYZER_MAGIC
self.receive_task = asyncio.create_task(self._receive_cr())
except:
self.writer.close()
del self.reader
del self.writer
raise
async def close(self):
self.disconnect_cb = None
try:
self.receive_task.cancel()
try:
await self.receive_task
except asyncio.CancelledError:
pass
finally:
self.writer.close()
del self.reader
del self.writer
async def _receive_cr(self):
try:
while True:
endian_byte = await self.reader.read(1)
if endian_byte == b"E":
endian = '>'
elif endian_byte == b"e":
endian = '<'
elif endian_byte == b"":
# EOF reached, connection lost
return
else:
raise ValueError
payload_length_word = await self.reader.readexactly(4)
payload_length = struct.unpack(endian + "I", payload_length_word)[0]
if payload_length > 10 * 512 * 1024:
# 10x buffer size of firmware
raise ValueError
# The remaining header length is 11 bytes.
remaining_data = await self.reader.readexactly(payload_length + 11)
data = endian_byte + payload_length_word + remaining_data
self.receive_cb(data)
except Exception:
logger.error("analyzer receiver connection terminating with exception", exc_info=True)
finally:
if self.disconnect_cb is not None:
self.disconnect_cb()
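The receiver above implements a small framed protocol: after the ``ARTIQ Analyzer Proxy`` magic line, each dump starts with an endianness byte (``E``/``e``), a 4-byte payload length and 11 further header bytes, and every reassembled frame is handed to ``receive_cb``. A hedged usage sketch (host and port are placeholders for an analyzer proxy's data port):

```python
import asyncio

from artiq.coredevice.comm_analyzer import AnalyzerProxyReceiver, decode_dump


async def dump_once(host="192.168.1.70", port=1385):
    # Sketch only: wait for a single dump pushed by the analyzer proxy,
    # then decode it with decode_dump() from this module.
    got_dump = asyncio.Event()
    dumps = []

    def on_dump(data):
        dumps.append(data)
        got_dump.set()

    receiver = AnalyzerProxyReceiver(on_dump)
    await receiver.connect(host, port)
    try:
        await got_dump.wait()
    finally:
        await receiver.close()
    return decode_dump(dumps[0])

# e.g. decoded = asyncio.run(dump_once())
```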
def vcd_codes():
codechars = [chr(i) for i in range(33, 127)]
for n in count():
@ -150,38 +234,129 @@ class VCDChannel:
integer_cast = struct.unpack(">Q", struct.pack(">d", x))[0]
self.set_value("{:064b}".format(integer_cast))
def set_log(self, log_message):
value = ""
for c in log_message:
value += "{:08b}".format(ord(c))
self.set_value(value)
class VCDManager:
def __init__(self, fileobj):
self.out = fileobj
self.codes = vcd_codes()
self.current_time = None
self.start_time = 0
def set_timescale_ps(self, timescale):
self.out.write("$timescale {}ps $end\n".format(round(timescale)))
def get_channel(self, name, width):
def get_channel(self, name, width, ty, precision=0, unit=""):
code = next(self.codes)
self.out.write("$var wire {width} {code} {name} $end\n"
.format(name=name, code=code, width=width))
return VCDChannel(self.out, code)
@contextmanager
def scope(self, name):
self.out.write("$scope module {} $end\n".format(name))
def scope(self, scope, name):
self.out.write("$scope module {}/{} $end\n".format(scope, name))
yield
self.out.write("$upscope $end\n")
def set_time(self, time):
time -= self.start_time
if time != self.current_time:
self.out.write("#{}\n".format(time))
self.current_time = time
def set_start_time(self, time):
self.start_time = time
def set_end_time(self, time):
pass
class WaveformManager:
def __init__(self):
self.current_time = 0
self.start_time = 0
self.end_time = 0
self.channels = list()
self.current_scope = ""
self.trace = {"timescale": 1, "stopped_x": None, "logs": dict(), "data": dict()}
def set_timescale_ps(self, timescale):
self.trace["timescale"] = int(timescale)
def get_channel(self, name, width, ty, precision=0, unit=""):
if ty == WaveformType.LOG:
self.trace["logs"][self.current_scope + name] = (ty, width, precision, unit)
data = self.trace["data"][self.current_scope + name] = list()
channel = WaveformChannel(data, self.current_time)
self.channels.append(channel)
return channel
@contextmanager
def scope(self, scope, name):
old_scope = self.current_scope
self.current_scope = scope + "/"
yield
self.current_scope = old_scope
def set_time(self, time):
time -= self.start_time
for channel in self.channels:
channel.set_time(time)
def set_start_time(self, time):
self.start_time = time
if self.trace["stopped_x"] is not None:
self.trace["stopped_x"] = self.end_time - self.start_time
def set_end_time(self, time):
self.end_time = time
self.trace["stopped_x"] = self.end_time - self.start_time
class WaveformChannel:
def __init__(self, data, current_time):
self.data = data
self.current_time = current_time
def set_value(self, value):
self.data.append((self.current_time, value))
def set_value_double(self, x):
self.data.append((self.current_time, x))
def set_time(self, time):
self.current_time = time
def set_log(self, log_message):
self.data.append((self.current_time, log_message))
class ChannelSignatureManager:
def __init__(self):
self.current_scope = ""
self.channels = dict()
def get_channel(self, name, width, ty, precision=0, unit=""):
self.channels[self.current_scope + name] = (ty, width, precision, unit)
return None
@contextmanager
def scope(self, scope, name):
old_scope = self.current_scope
self.current_scope = scope + "/"
yield
self.current_scope = old_scope
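Unlike the VCD path, ``WaveformManager`` produces a plain dictionary: ``trace["data"]`` maps ``scope/name`` keys to lists of ``(time, value)`` pairs, ``trace["logs"]`` carries log channel signatures, and ``stopped_x`` marks the end of the capture. A small sketch of consuming such a trace:

```python
def print_trace_summary(trace):
    # trace is the dict built by WaveformManager above:
    #   {"timescale": ..., "stopped_x": ..., "logs": {...}, "data": {...}}
    print("timescale (ps):", trace["timescale"])
    print("stopped at:", trace["stopped_x"])
    for channel, points in trace["data"].items():
        if points:
            t, v = points[-1]
            print("{}: {} events, last value {!r} at t={}".format(
                channel, len(points), v, t))
```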
class TTLHandler:
def __init__(self, vcd_manager, name):
def __init__(self, manager, name):
self.name = name
self.channel_value = vcd_manager.get_channel("ttl/" + name, 1)
self.channel_value = manager.get_channel("ttl/" + name, 1, ty=WaveformType.BIT)
self.last_value = "X"
self.oe = True
@ -206,11 +381,12 @@ class TTLHandler:
class TTLClockGenHandler:
def __init__(self, vcd_manager, name, ref_period):
def __init__(self, manager, name, ref_period):
self.name = name
self.ref_period = ref_period
self.channel_frequency = vcd_manager.get_channel(
"ttl_clkgen/" + name, 64)
precision = max(0, math.ceil(math.log10(2**24 * ref_period) + 6))
self.channel_frequency = manager.get_channel(
"ttl_clkgen/" + name, 64, ty=WaveformType.ANALOG, precision=precision, unit="MHz")
def process_message(self, message):
if isinstance(message, OutputMessage):
@ -221,8 +397,8 @@ class TTLClockGenHandler:
class DDSHandler:
def __init__(self, vcd_manager, onehot_sel, sysclk):
self.vcd_manager = vcd_manager
def __init__(self, manager, onehot_sel, sysclk):
self.manager = manager
self.onehot_sel = onehot_sel
self.sysclk = sysclk
@ -231,11 +407,18 @@ class DDSHandler:
def add_dds_channel(self, name, dds_channel_nr):
dds_channel = dict()
with self.vcd_manager.scope("dds/{}".format(name)):
frequency_precision = max(0, math.ceil(math.log10(2**32 / self.sysclk) + 6))
phase_precision = max(0, math.ceil(math.log10(2**16)))
with self.manager.scope("dds", name):
dds_channel["vcd_frequency"] = \
self.vcd_manager.get_channel(name + "/frequency", 64)
self.manager.get_channel(name + "/frequency", 64,
ty=WaveformType.ANALOG,
precision=frequency_precision,
unit="MHz")
dds_channel["vcd_phase"] = \
self.vcd_manager.get_channel(name + "/phase", 64)
self.manager.get_channel(name + "/phase", 64,
ty=WaveformType.ANALOG,
precision=phase_precision)
dds_channel["ftw"] = [None, None]
dds_channel["pow"] = None
self.dds_channels[dds_channel_nr] = dds_channel
@ -285,10 +468,10 @@ class DDSHandler:
class WishboneHandler:
def __init__(self, vcd_manager, name, read_bit):
def __init__(self, manager, name, read_bit):
self._reads = []
self._read_bit = read_bit
self.stb = vcd_manager.get_channel("{}/{}".format(name, "stb"), 1)
self.stb = manager.get_channel(name + "/stb", 1, ty=WaveformType.BIT)
def process_message(self, message):
self.stb.set_value("1")
@ -318,16 +501,17 @@ class WishboneHandler:
class SPIMasterHandler(WishboneHandler):
def __init__(self, vcd_manager, name):
def __init__(self, manager, name):
self.channels = {}
with vcd_manager.scope("spi/{}".format(name)):
super().__init__(vcd_manager, name, read_bit=0b100)
self.scope = "spi"
with manager.scope("spi", name):
super().__init__(manager, name, read_bit=0b100)
for reg_name, reg_width in [
("config", 32), ("chip_select", 16),
("write_length", 8), ("read_length", 8),
("write", 32), ("read", 32)]:
self.channels[reg_name] = vcd_manager.get_channel(
"{}/{}".format(name, reg_name), reg_width)
self.channels[reg_name] = manager.get_channel(
"{}/{}".format(name, reg_name), reg_width, ty=WaveformType.VECTOR)
def process_write(self, address, data):
if address == 0:
@ -352,11 +536,12 @@ class SPIMasterHandler(WishboneHandler):
class SPIMaster2Handler(WishboneHandler):
def __init__(self, vcd_manager, name):
def __init__(self, manager, name):
self._reads = []
self.channels = {}
with vcd_manager.scope("spi2/{}".format(name)):
self.stb = vcd_manager.get_channel("{}/{}".format(name, "stb"), 1)
self.scope = "spi2"
with manager.scope("spi2", name):
self.stb = manager.get_channel(name + "/stb", 1, ty=WaveformType.BIT)
for reg_name, reg_width in [
("flags", 8),
("length", 5),
@ -364,8 +549,8 @@ class SPIMaster2Handler(WishboneHandler):
("chip_select", 8),
("write", 32),
("read", 32)]:
self.channels[reg_name] = vcd_manager.get_channel(
"{}/{}".format(name, reg_name), reg_width)
self.channels[reg_name] = manager.get_channel(
"{}/{}".format(name, reg_name), reg_width, ty=WaveformType.VECTOR)
def process_message(self, message):
self.stb.set_value("1")
@ -413,11 +598,12 @@ def _extract_log_chars(data):
class LogHandler:
def __init__(self, vcd_manager, vcd_log_channels):
self.vcd_channels = dict()
for name, maxlength in vcd_log_channels.items():
self.vcd_channels[name] = vcd_manager.get_channel("log/" + name,
maxlength*8)
def __init__(self, manager, log_channels):
self.channels = dict()
for name, maxlength in log_channels.items():
self.channels[name] = manager.get_channel("logs/" + name,
maxlength * 8,
ty=WaveformType.LOG)
self.current_entry = ""
def process_message(self, message):
@ -425,15 +611,12 @@ class LogHandler:
self.current_entry += _extract_log_chars(message.data)
if len(self.current_entry) > 1 and self.current_entry[-1] == "\x1D":
channel_name, log_message = self.current_entry[:-1].split("\x1E", maxsplit=1)
vcd_value = ""
for c in log_message:
vcd_value += "{:08b}".format(ord(c))
self.vcd_channels[channel_name].set_value(vcd_value)
self.channels[channel_name].set_log(log_message)
self.current_entry = ""
def get_vcd_log_channels(log_channel, messages):
vcd_log_channels = dict()
def get_log_channels(log_channel, messages):
log_channels = dict()
log_entry = ""
for message in messages:
if (isinstance(message, OutputMessage)
@ -442,13 +625,13 @@ def get_vcd_log_channels(log_channel, messages):
if len(log_entry) > 1 and log_entry[-1] == "\x1D":
channel_name, log_message = log_entry[:-1].split("\x1E", maxsplit=1)
l = len(log_message)
if channel_name in vcd_log_channels:
if vcd_log_channels[channel_name] < l:
vcd_log_channels[channel_name] = l
if channel_name in log_channels:
if log_channels[channel_name] < l:
log_channels[channel_name] = l
else:
vcd_log_channels[channel_name] = l
log_channels[channel_name] = l
log_entry = ""
return vcd_log_channels
return log_channels
def get_single_device_argument(devices, module, cls, argument):
@ -475,7 +658,7 @@ def get_dds_sysclk(devices):
("AD9914",), "sysclk")
def create_channel_handlers(vcd_manager, devices, ref_period,
def create_channel_handlers(manager, devices, ref_period,
dds_sysclk, dds_onehot_sel):
channel_handlers = dict()
for name, desc in sorted(devices.items(), key=itemgetter(0)):
@ -483,11 +666,11 @@ def create_channel_handlers(vcd_manager, devices, ref_period,
if (desc["module"] == "artiq.coredevice.ttl"
and desc["class"] in {"TTLOut", "TTLInOut"}):
channel = desc["arguments"]["channel"]
channel_handlers[channel] = TTLHandler(vcd_manager, name)
channel_handlers[channel] = TTLHandler(manager, name)
if (desc["module"] == "artiq.coredevice.ttl"
and desc["class"] == "TTLClockGen"):
channel = desc["arguments"]["channel"]
channel_handlers[channel] = TTLClockGenHandler(vcd_manager, name, ref_period)
channel_handlers[channel] = TTLClockGenHandler(manager, name, ref_period)
if (desc["module"] == "artiq.coredevice.ad9914"
and desc["class"] == "AD9914"):
dds_bus_channel = desc["arguments"]["bus_channel"]
@ -495,37 +678,60 @@ def create_channel_handlers(vcd_manager, devices, ref_period,
if dds_bus_channel in channel_handlers:
dds_handler = channel_handlers[dds_bus_channel]
else:
dds_handler = DDSHandler(vcd_manager, dds_onehot_sel, dds_sysclk)
dds_handler = DDSHandler(manager, dds_onehot_sel, dds_sysclk)
channel_handlers[dds_bus_channel] = dds_handler
dds_handler.add_dds_channel(name, dds_channel)
if (desc["module"] == "artiq.coredevice.spi2" and
desc["class"] == "SPIMaster"):
channel = desc["arguments"]["channel"]
channel_handlers[channel] = SPIMaster2Handler(
vcd_manager, name)
manager, name)
return channel_handlers
def get_channel_list(devices):
manager = ChannelSignatureManager()
create_channel_handlers(manager, devices, 1e-9, 3e9, False)
ref_period = get_ref_period(devices)
if ref_period is None:
ref_period = DEFAULT_REF_PERIOD
precision = max(0, math.ceil(math.log10(1 / ref_period) - 6))
manager.get_channel("rtio_slack", 64, ty=WaveformType.ANALOG, precision=precision, unit="us")
return manager.channels
def get_message_time(message):
return getattr(message, "timestamp", message.rtio_counter)
def decoded_dump_to_vcd(fileobj, devices, dump, uniform_interval=False):
vcd_manager = VCDManager(fileobj)
decoded_dump_to_target(vcd_manager, devices, dump, uniform_interval)
def decoded_dump_to_waveform_data(devices, dump, uniform_interval=False):
manager = WaveformManager()
decoded_dump_to_target(manager, devices, dump, uniform_interval)
return manager.trace
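Together with ``get_analyzer_dump`` and ``decode_dump`` defined earlier in this file, the new entry points give a short offline path from a core device to waveform data. A hedged sketch (the core device address is a placeholder and it assumes a conventional ``device_db.py`` is importable):

```python
from artiq.coredevice.comm_analyzer import (
    get_analyzer_dump, decode_dump, decoded_dump_to_waveform_data, get_channel_list)
from device_db import device_db   # assumes a standard device_db.py on the path

raw = get_analyzer_dump("192.168.1.70")              # default port 1382
decoded = decode_dump(raw)
channels = get_channel_list(device_db)               # {name: (type, width, precision, unit)}
trace = decoded_dump_to_waveform_data(device_db, decoded)
print(len(channels), "known channels,", len(trace["data"]), "with recorded events")
```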
def decoded_dump_to_target(manager, devices, dump, uniform_interval):
ref_period = get_ref_period(devices)
if ref_period is not None:
if not uniform_interval:
vcd_manager.set_timescale_ps(ref_period*1e12)
else:
if ref_period is None:
logger.warning("unable to determine core device ref_period")
ref_period = 1e-9 # guess
ref_period = DEFAULT_REF_PERIOD
if not uniform_interval:
manager.set_timescale_ps(ref_period*1e12)
dds_sysclk = get_dds_sysclk(devices)
if dds_sysclk is None:
logger.warning("unable to determine DDS sysclk")
dds_sysclk = 3e9 # guess
if isinstance(dump.messages[-1], StoppedMessage):
m = dump.messages[-1]
end_time = get_message_time(m)
manager.set_end_time(end_time)
messages = dump.messages[:-1]
else:
logger.warning("StoppedMessage missing")
@ -533,38 +739,39 @@ def decoded_dump_to_vcd(fileobj, devices, dump, uniform_interval=False):
messages = sorted(messages, key=get_message_time)
channel_handlers = create_channel_handlers(
vcd_manager, devices, ref_period,
manager, devices, ref_period,
dds_sysclk, dump.dds_onehot_sel)
vcd_log_channels = get_vcd_log_channels(dump.log_channel, messages)
log_channels = get_log_channels(dump.log_channel, messages)
channel_handlers[dump.log_channel] = LogHandler(
vcd_manager, vcd_log_channels)
manager, log_channels)
if uniform_interval:
# RTIO event timestamp in machine units
timestamp = vcd_manager.get_channel("timestamp", 64)
timestamp = manager.get_channel("timestamp", 64, ty=WaveformType.VECTOR)
# RTIO time interval between this and the next timed event
# in SI seconds
interval = vcd_manager.get_channel("interval", 64)
slack = vcd_manager.get_channel("rtio_slack", 64)
interval = manager.get_channel("interval", 64, ty=WaveformType.ANALOG)
slack = manager.get_channel("rtio_slack", 64, ty=WaveformType.ANALOG)
vcd_manager.set_time(0)
manager.set_time(0)
start_time = 0
for m in messages:
start_time = get_message_time(m)
if start_time:
break
t0 = 0
if not uniform_interval:
manager.set_start_time(start_time)
t0 = start_time
for i, message in enumerate(messages):
if message.channel in channel_handlers:
t = get_message_time(message) - start_time
t = get_message_time(message)
if t >= 0:
if uniform_interval:
interval.set_value_double((t - t0)*ref_period)
vcd_manager.set_time(i)
manager.set_time(i)
timestamp.set_value("{:064b}".format(t))
t0 = t
else:
vcd_manager.set_time(t)
manager.set_time(t)
channel_handlers[message.channel].process_message(message)
if isinstance(message, OutputMessage):
slack.set_value_double(

View File

@ -94,9 +94,7 @@ class CommMonInj:
self.injection_status_cb(channel, override, value)
else:
raise ValueError("Unknown packet type", ty)
except asyncio.CancelledError:
raise
except:
except Exception:
logger.error("Moninj connection terminating with exception", exc_info=True)
finally:
if self.disconnect_cb is not None:

View File

@ -120,13 +120,15 @@ class Core:
def compile(self, function, args, kwargs, set_result=None,
attribute_writeback=True, print_as_rpc=True,
target=None, destination=0, subkernel_arg_types=[]):
target=None, destination=0, subkernel_arg_types=[],
old_embedding_map=None):
try:
engine = _DiagnosticEngine(all_errors_are_fatal=True)
stitcher = Stitcher(engine=engine, core=self, dmgr=self.dmgr,
print_as_rpc=print_as_rpc,
destination=destination, subkernel_arg_types=subkernel_arg_types)
destination=destination, subkernel_arg_types=subkernel_arg_types,
old_embedding_map=old_embedding_map)
stitcher.stitch_call(function, args, kwargs, set_result)
stitcher.finalize()
@ -165,7 +167,7 @@ class Core:
self._run_compiled(kernel_library, embedding_map, symbolizer, demangler)
return result
def compile_subkernel(self, sid, subkernel_fn, embedding_map, args, subkernel_arg_types):
def compile_subkernel(self, sid, subkernel_fn, embedding_map, args, subkernel_arg_types, subkernels):
# pass self to subkernels (if applicable)
# assuming the first argument is self
subkernel_args = getfullargspec(subkernel_fn.artiq_embedded.function)
@ -179,17 +181,49 @@ class Core:
object_map, kernel_library, _, _, _ = \
self.compile(subkernel_fn, self_arg, {}, attribute_writeback=False,
print_as_rpc=False, target=target, destination=destination,
subkernel_arg_types=subkernel_arg_types.get(sid, []))
if object_map.has_rpc_or_subkernel():
raise ValueError("Subkernel must not use RPC or subkernels in other destinations")
return destination, kernel_library
subkernel_arg_types=subkernel_arg_types.get(sid, []),
old_embedding_map=embedding_map)
if object_map.has_rpc():
raise ValueError("Subkernel must not use RPC")
return destination, kernel_library, object_map
def compile_and_upload_subkernels(self, embedding_map, args, subkernel_arg_types):
for sid, subkernel_fn in embedding_map.subkernels().items():
destination, kernel_library = \
subkernels = embedding_map.subkernels()
subkernels_compiled = []
while True:
new_subkernels = {}
for sid, subkernel_fn in subkernels.items():
if sid in subkernels_compiled:
continue
destination, kernel_library, embedding_map = \
self.compile_subkernel(sid, subkernel_fn, embedding_map,
args, subkernel_arg_types)
args, subkernel_arg_types, subkernels)
self.comm.upload_subkernel(kernel_library, sid, destination)
new_subkernels.update(embedding_map.subkernels())
subkernels_compiled.append(sid)
if new_subkernels == subkernels:
break
subkernels.update(new_subkernels)
# check for messages without a send/recv pair
unpaired_messages = embedding_map.subkernel_messages_unpaired()
if unpaired_messages:
for unpaired_message in unpaired_messages:
engine = _DiagnosticEngine(all_errors_are_fatal=False)
# errors are non-fatal in order to display
# all unpaired message errors before raising an exception
if unpaired_message.send_loc is None:
diag = diagnostic.Diagnostic("error",
"subkernel message '{name}' only has a receiver but no sender",
{"name": unpaired_message.name},
unpaired_message.recv_loc)
else:
diag = diagnostic.Diagnostic("error",
"subkernel message '{name}' only has a sender but no receiver",
{"name": unpaired_message.name},
unpaired_message.send_loc)
engine.process(diag)
raise ValueError("Found subkernel message(s) without a full send/recv pair")
def precompile(self, function, *args, **kwargs):
"""Precompile a kernel and return a callable that executes it on the core device
@ -319,6 +353,4 @@ class Core:
if self.analyzer_proxy is None:
raise IOError("No analyzer proxy configured")
else:
success = self.analyzer_proxy.trigger()
if not success:
raise IOError("Analyzer proxy reported failure")
self.analyzer_proxy.trigger()
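
Illustration only, not part of the changeset: a minimal sketch of a paired subkernel message, assuming a satellite at destination 1 and the subkernel_send()/subkernel_recv()/subkernel_await() helpers exported by artiq.experiment. The unpaired-message check in compile_and_upload_subkernels() above would flag either half of the "data" message on its own.

from artiq.experiment import *


@subkernel(destination=1)
def consumer() -> TNone:
    # receiver half of the "data" message; the matching subkernel_send()
    # in run() below completes the send/recv pair checked for above
    value = subkernel_recv("data", TInt32)


class SubkernelMessageDemo(EnvExperiment):
    def build(self):
        self.setattr_device("core")

    @kernel
    def run(self):
        self.core.reset()
        consumer()                      # start the subkernel on destination 1
        subkernel_send(1, "data", 42)   # sender half of the "data" message
        subkernel_await(consumer)       # wait for the subkernel to finish
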

View File

@ -49,6 +49,10 @@
"default": 125e6,
"description": "RTIO frequency"
},
"enable_wrpll": {
"type": "boolean",
"default": false
},
"core_addr": {
"type": "string",
"format": "ipv4",
@ -308,8 +312,7 @@
"clk_div": {
"type": "integer",
"minimum": 0,
"maximum": 3,
"default": 0
"maximum": 3
},
"pll_n": {
"type": "integer"

View File

@ -2,7 +2,7 @@ from numpy import int32, int64
from artiq.language.core import *
from artiq.language.types import *
from artiq.coredevice.rtio import rtio_output, rtio_input_data
from artiq.coredevice.rtio import rtio_output, rtio_input_timestamped_data
class OutOfSyncException(Exception):
@ -11,6 +11,11 @@ class OutOfSyncException(Exception):
pass
class GrabberTimeoutException(Exception):
"""Raised when a timeout occurs while attempting to read Grabber RTIO input events."""
pass
class Grabber:
"""Driver for the Grabber camera interface."""
kernel_invariants = {"core", "channel_base", "sentinel"}
@ -82,10 +87,10 @@ class Grabber:
self.gate_roi(0)
@kernel
def input_mu(self, data):
def input_mu(self, data, timeout_mu=-1):
"""
Retrieves the accumulated values for one frame from the ROI engines.
Blocks until values are available.
Blocks until values are available or timeout is reached.
The input list must be a list of integers of the same length as there
are enabled ROI engines. This method replaces the elements of the
@ -95,15 +100,26 @@ class Grabber:
If the number of elements in the list does not match the number of
ROI engines that produced output, an exception will be raised during
this call or the next.
If the timeout is reached before data is available, the exception
GrabberTimeoutException is raised.
:param timeout_mu: Timestamp at which a timeout will occur. Set to -1
(default) to disable timeout.
"""
channel = self.channel_base + 1
sentinel = rtio_input_data(channel)
timestamp, sentinel = rtio_input_timestamped_data(timeout_mu, channel)
if timestamp == -1:
raise GrabberTimeoutException("Timeout before Grabber frame available")
if sentinel != self.sentinel:
raise OutOfSyncException
for i in range(len(data)):
roi_output = rtio_input_data(channel)
timestamp, roi_output = rtio_input_timestamped_data(timeout_mu, channel)
if roi_output == self.sentinel:
raise OutOfSyncException
if timestamp == -1:
raise GrabberTimeoutException(
"Timeout retrieving ROIs (attempting to read more ROIs than enabled?)")
data[i] = roi_output
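
Usage sketch (illustration only, not part of the changeset): assuming a device_db entry named "grabber" with two enabled ROI engines, the new timeout parameter could be exercised as below; ROI setup and gating are omitted for brevity.

from artiq.experiment import *
from artiq.coredevice.grabber import GrabberTimeoutException


class GrabberTimeoutDemo(EnvExperiment):
    def build(self):
        self.setattr_device("core")
        self.setattr_device("grabber")

    @kernel
    def run(self):
        self.core.reset()
        data = [0, 0]   # one element per enabled ROI engine
        # absolute RTIO timestamp at which input_mu() gives up
        deadline = now_mu() + self.core.seconds_to_mu(100*ms)
        try:
            self.grabber.input_mu(data, deadline)
        except GrabberTimeoutException:
            print("no Grabber frame within 100 ms")
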

View File

@ -8,7 +8,6 @@ from sipyco import pyon
from artiq.tools import scale_from_metadata, short_format, exc_to_warning
from artiq.gui.tools import LayoutWidget, QRecursiveFilterProxyModel
from artiq.gui.models import DictSyncTreeSepModel
from artiq.gui.scientific_spinbox import ScientificSpinBox
logger = logging.getLogger(__name__)
@ -81,8 +80,7 @@ class CreateEditDialog(QtWidgets.QDialog):
t = value.dtype if value is np.ndarray else type(value)
if scale != 1 and np.issubdtype(t, np.number):
# degenerates to float type
value_edit_string = self.value_to_edit_string(
np.float64(value / scale))
value_edit_string = self.value_to_edit_string(value / scale)
self.unit_widget.setText(metadata.get('unit', ''))
self.scale_widget.setText(str(metadata.get('scale', '')))
self.precision_widget.setText(str(metadata.get('precision', '')))
@ -109,11 +107,13 @@ class CreateEditDialog(QtWidgets.QDialog):
t = value.dtype if value is np.ndarray else type(value)
if scale != 1 and np.issubdtype(t, np.number):
# degenerates to float type
value = np.float64(value * scale)
value = float(value * scale)
if self.key and self.key != key:
asyncio.ensure_future(exc_to_warning(rename(self.key, key, value, metadata, persist, self.dataset_ctl)))
asyncio.ensure_future(exc_to_warning(rename(self.key, key, value, metadata, persist,
self.dataset_ctl)))
else:
asyncio.ensure_future(exc_to_warning(self.dataset_ctl.set(key, value, metadata=metadata, persist=persist)))
asyncio.ensure_future(exc_to_warning(self.dataset_ctl.set(key, value, metadata=metadata,
persist=persist)))
self.key = key
QtWidgets.QDialog.accept(self)

View File

@ -9,10 +9,9 @@ import h5py
from sipyco import pyon
from artiq.gui.entries import procdesc_to_entry, ScanEntry
from artiq.gui.entries import procdesc_to_entry, EntryTreeWidget
from artiq.gui.fuzzy_select import FuzzySelectWidget
from artiq.gui.tools import (LayoutWidget, WheelFilter,
log_level_to_name, get_open_file_name)
from artiq.gui.tools import (LayoutWidget, log_level_to_name, get_open_file_name)
from artiq.tools import parse_devarg_override, unparse_devarg_override
@ -25,99 +24,23 @@ logger = logging.getLogger(__name__)
# 2. file:<class name>@<file name>
class _ArgumentEditor(QtWidgets.QTreeWidget):
class _ArgumentEditor(EntryTreeWidget):
def __init__(self, manager, dock, expurl):
self.manager = manager
self.expurl = expurl
QtWidgets.QTreeWidget.__init__(self)
self.setColumnCount(3)
self.header().setStretchLastSection(False)
if hasattr(self.header(), "setSectionResizeMode"):
set_resize_mode = self.header().setSectionResizeMode
else:
set_resize_mode = self.header().setResizeMode
set_resize_mode(0, QtWidgets.QHeaderView.ResizeToContents)
set_resize_mode(1, QtWidgets.QHeaderView.Stretch)
set_resize_mode(2, QtWidgets.QHeaderView.ResizeToContents)
self.header().setVisible(False)
self.setSelectionMode(self.NoSelection)
self.setHorizontalScrollMode(self.ScrollPerPixel)
self.setVerticalScrollMode(self.ScrollPerPixel)
self.setStyleSheet("QTreeWidget {background: " +
self.palette().midlight().color().name() + " ;}")
self.viewport().installEventFilter(WheelFilter(self.viewport(), True))
self._groups = dict()
self._arg_to_widgets = dict()
EntryTreeWidget.__init__(self)
arguments = self.manager.get_submission_arguments(self.expurl)
if not arguments:
self.addTopLevelItem(QtWidgets.QTreeWidgetItem(["No arguments"]))
self.insertTopLevelItem(0, QtWidgets.QTreeWidgetItem(["No arguments"]))
gradient = QtGui.QLinearGradient(
0, 0, 0, QtGui.QFontMetrics(self.font()).lineSpacing()*2.5)
gradient.setColorAt(0, self.palette().base().color())
gradient.setColorAt(1, self.palette().midlight().color())
for name, argument in arguments.items():
widgets = dict()
self._arg_to_widgets[name] = widgets
self.set_argument(name, argument)
entry = procdesc_to_entry(argument["desc"])(argument)
widget_item = QtWidgets.QTreeWidgetItem([name])
if argument["tooltip"]:
widget_item.setToolTip(0, argument["tooltip"])
widgets["entry"] = entry
widgets["widget_item"] = widget_item
self.quickStyleClicked.connect(dock.submit_clicked)
for col in range(3):
widget_item.setBackground(col, gradient)
font = widget_item.font(0)
font.setBold(True)
widget_item.setFont(0, font)
if argument["group"] is None:
self.addTopLevelItem(widget_item)
else:
self._get_group(argument["group"]).addChild(widget_item)
fix_layout = LayoutWidget()
widgets["fix_layout"] = fix_layout
fix_layout.addWidget(entry)
self.setItemWidget(widget_item, 1, fix_layout)
recompute_argument = QtWidgets.QToolButton()
recompute_argument.setToolTip("Re-run the experiment's build "
"method and take the default value")
recompute_argument.setIcon(
QtWidgets.QApplication.style().standardIcon(
QtWidgets.QStyle.SP_BrowserReload))
recompute_argument.clicked.connect(
partial(self._recompute_argument_clicked, name))
tool_buttons = LayoutWidget()
tool_buttons.addWidget(recompute_argument, 1)
disable_other_scans = QtWidgets.QToolButton()
widgets["disable_other_scans"] = disable_other_scans
disable_other_scans.setIcon(
QtWidgets.QApplication.style().standardIcon(
QtWidgets.QStyle.SP_DialogResetButton))
disable_other_scans.setToolTip("Disable all other scans in "
"this experiment")
disable_other_scans.clicked.connect(
partial(self._disable_other_scans, name))
tool_buttons.layout.setRowStretch(0, 1)
tool_buttons.layout.setRowStretch(3, 1)
tool_buttons.addWidget(disable_other_scans, 2)
if not isinstance(entry, ScanEntry):
disable_other_scans.setVisible(False)
self.setItemWidget(widget_item, 2, tool_buttons)
widget_item = QtWidgets.QTreeWidgetItem()
self.addTopLevelItem(widget_item)
recompute_arguments = QtWidgets.QPushButton("Recompute all arguments")
recompute_arguments.setIcon(
QtWidgets.QApplication.style().standardIcon(
@ -136,41 +59,10 @@ class _ArgumentEditor(QtWidgets.QTreeWidget):
buttons.layout.setColumnStretch(1, 0)
buttons.layout.setColumnStretch(2, 0)
buttons.layout.setColumnStretch(3, 1)
self.setItemWidget(widget_item, 1, buttons)
self.setItemWidget(self.bottom_item, 1, buttons)
def _get_group(self, name):
if name in self._groups:
return self._groups[name]
group = QtWidgets.QTreeWidgetItem([name])
for col in range(3):
group.setBackground(col, self.palette().mid())
group.setForeground(col, self.palette().brightText())
font = group.font(col)
font.setBold(True)
group.setFont(col, font)
self.addTopLevelItem(group)
self._groups[name] = group
return group
def update_argument(self, name, argument):
widgets = self._arg_to_widgets[name]
# Qt needs a setItemWidget() to handle layout correctly,
# simply replacing the entry inside the LayoutWidget
# results in a bug.
widgets["entry"].deleteLater()
widgets["entry"] = procdesc_to_entry(argument["desc"])(argument)
widgets["disable_other_scans"].setVisible(
isinstance(widgets["entry"], ScanEntry))
widgets["fix_layout"].deleteLater()
widgets["fix_layout"] = LayoutWidget()
widgets["fix_layout"].addWidget(widgets["entry"])
self.setItemWidget(widgets["widget_item"], 1, widgets["fix_layout"])
self.updateGeometries()
def _recompute_argument_clicked(self, name):
asyncio.ensure_future(self._recompute_argument(name))
def reset_entry(self, key):
asyncio.ensure_future(self._recompute_argument(key))
async def _recompute_argument(self, name):
try:
@ -187,30 +79,6 @@ class _ArgumentEditor(QtWidgets.QTreeWidget):
argument["state"] = state
self.update_argument(name, argument)
def _disable_other_scans(self, current_name):
for name, widgets in self._arg_to_widgets.items():
if (name != current_name
and isinstance(widgets["entry"], ScanEntry)):
widgets["entry"].disable()
def save_state(self):
expanded = []
for k, v in self._groups.items():
if v.isExpanded():
expanded.append(k)
return {
"expanded": expanded,
"scroll": self.verticalScrollBar().value()
}
def restore_state(self, state):
for e in state["expanded"]:
try:
self._groups[e].setExpanded(True)
except KeyError:
pass
self.verticalScrollBar().setValue(state["scroll"])
# Hooks that allow user-supplied argument editors to react to imminent user
# actions. Here, we always keep the manager-stored submission arguments
# up-to-date, so no further action is required.
@ -230,7 +98,7 @@ class _ExperimentDock(QtWidgets.QMdiSubWindow):
def __init__(self, manager, expurl):
QtWidgets.QMdiSubWindow.__init__(self)
qfm = QtGui.QFontMetrics(self.font())
self.resize(100*qfm.averageCharWidth(), 30*qfm.lineSpacing())
self.resize(100 * qfm.averageCharWidth(), 30 * qfm.lineSpacing())
self.setWindowTitle(expurl)
self.setWindowIcon(QtWidgets.QApplication.style().standardIcon(
QtWidgets.QStyle.SP_FileDialogContentsView))
@ -263,17 +131,17 @@ class _ExperimentDock(QtWidgets.QMdiSubWindow):
datetime.setDate(QtCore.QDate.currentDate())
else:
datetime.setDateTime(QtCore.QDateTime.fromMSecsSinceEpoch(
int(scheduling["due_date"]*1000)))
int(scheduling["due_date"] * 1000)))
datetime_en.setChecked(scheduling["due_date"] is not None)
def update_datetime(dt):
scheduling["due_date"] = dt.toMSecsSinceEpoch()/1000
scheduling["due_date"] = dt.toMSecsSinceEpoch() / 1000
datetime_en.setChecked(True)
datetime.dateTimeChanged.connect(update_datetime)
def update_datetime_en(checked):
if checked:
due_date = datetime.dateTime().toMSecsSinceEpoch()/1000
due_date = datetime.dateTime().toMSecsSinceEpoch() / 1000
else:
due_date = None
scheduling["due_date"] = due_date
@ -349,9 +217,10 @@ class _ExperimentDock(QtWidgets.QMdiSubWindow):
repo_rev = QtWidgets.QLineEdit()
repo_rev.setPlaceholderText("current")
repo_rev.setClearButtonEnabled(True)
repo_rev_label = QtWidgets.QLabel("Revision:")
repo_rev_label = QtWidgets.QLabel("Rev / ref:")
repo_rev_label.setToolTip("Experiment repository revision "
"(commit ID) to use")
"(commit ID) or reference (branch "
"or tag) to use")
self.layout.addWidget(repo_rev_label, 3, 2)
self.layout.addWidget(repo_rev, 3, 3)
@ -420,8 +289,7 @@ class _ExperimentDock(QtWidgets.QMdiSubWindow):
arginfo = expdesc["arginfo"]
for k, v in overrides.items():
# Some values (e.g. scans) may have multiple defaults in a list
if ("default" in arginfo[k][0]
and isinstance(arginfo[k][0]["default"], list)):
if ("default" in arginfo[k][0] and isinstance(arginfo[k][0]["default"], list)):
arginfo[k][0]["default"].insert(0, v)
else:
arginfo[k][0]["default"] = v
@ -432,8 +300,8 @@ class _ExperimentDock(QtWidgets.QMdiSubWindow):
editor_class = self.manager.get_argument_editor_class(self.expurl)
self.argeditor = editor_class(self.manager, self, self.expurl)
self.argeditor.restore_state(argeditor_state)
self.layout.addWidget(self.argeditor, 0, 0, 1, 5)
self.argeditor.restore_state(argeditor_state)
def contextMenuEvent(self, event):
menu = QtWidgets.QMenu(self)
@ -486,9 +354,9 @@ class _ExperimentDock(QtWidgets.QMdiSubWindow):
unparse_devarg_override(expid["devarg_override"]))
self.log_level.setCurrentIndex(log_levels.index(
log_level_to_name(expid["log_level"])))
if ("repo_rev" in expid and
expid["repo_rev"] != "N/A" and
hasattr(self, "repo_rev")):
if "repo_rev" in expid and \
expid["repo_rev"] != "N/A" and \
hasattr(self, "repo_rev"):
self.repo_rev.setText(expid["repo_rev"])
except:
logger.error("Could not set submission options from HDF5 expid",
@ -691,7 +559,8 @@ class ExperimentManager:
if expurl in self.open_experiments.keys():
self.open_experiments[expurl].argeditor.update_argument(name, argument)
except:
logger.warn("Failed to set value for argument \"{}\" in experiment: {}.".format(name, expurl), exc_info=1)
logger.warn("Failed to set value for argument \"{}\" in experiment: {}."
.format(name, expurl), exc_info=1)
def get_submission_arguments(self, expurl):
if expurl in self.submission_arguments:
@ -701,8 +570,8 @@ class ExperimentManager:
raise ValueError("Submission arguments must be preinitialized "
"when not using repository")
class_desc = self.explist[expurl[5:]]
return self.initialize_submission_arguments(expurl,
class_desc["arginfo"], class_desc.get("argument_ui", None))
return self.initialize_submission_arguments(expurl, class_desc["arginfo"],
class_desc.get("argument_ui", None))
def open_experiment(self, expurl):
if expurl in self.open_experiments:
@ -739,7 +608,12 @@ class ExperimentManager:
del self.open_experiments[expurl]
async def _submit_task(self, expurl, *args):
try:
rid = await self.schedule_ctl.submit(*args)
except KeyError:
expid = args[1]
logger.error("Submission failed - revision \"%s\" was not found", expid["repo_rev"])
else:
logger.info("Submitted '%s', RID is %d", expurl, rid)
def submit(self, expurl):
@ -797,9 +671,9 @@ class ExperimentManager:
repo_match = "repo_rev" in expid
else:
repo_match = "repo_rev" not in expid
if (repo_match and
("file" in expid and expid["file"] == file) and
expid["class_name"] == class_name):
if repo_match and \
("file" in expid and expid["file"] == file) and \
expid["class_name"] == class_name:
rids.append(rid)
asyncio.ensure_future(self._request_term_multiple(rids))
@ -853,6 +727,7 @@ class ExperimentManager:
self.is_quick_open_shown = True
dialog = _QuickOpenDialog(self)
def closed():
self.is_quick_open_shown = False
dialog.closed.connect(closed)

View File

@ -94,7 +94,7 @@ class _OpenFileDialog(QtWidgets.QDialog):
else:
break
self.explorer.current_directory = \
self.explorer.current_directory[:idx+1]
self.explorer.current_directory[:idx + 1]
if self.explorer.current_directory == "/":
self.explorer.current_directory = ""
asyncio.ensure_future(self.refresh_view())
@ -103,6 +103,7 @@ class _OpenFileDialog(QtWidgets.QDialog):
asyncio.ensure_future(self.refresh_view())
else:
file = self.explorer.current_directory + selected
async def open_task():
try:
await self.exp_manager.open_file(file)
@ -232,7 +233,7 @@ class ExplorerDock(QtWidgets.QDockWidget):
set_shortcut_menu = QtWidgets.QMenu()
for i in range(12):
action = QtWidgets.QAction("F" + str(i+1), self.el)
action = QtWidgets.QAction("F" + str(i + 1), self.el)
action.triggered.connect(partial(self.set_shortcut, i))
set_shortcut_menu.addAction(action)
@ -246,12 +247,14 @@ class ExplorerDock(QtWidgets.QDockWidget):
scan_repository_action = QtWidgets.QAction("Scan repository HEAD",
self.el)
def scan_repository():
asyncio.ensure_future(experiment_db_ctl.scan_repository_async())
scan_repository_action.triggered.connect(scan_repository)
self.el.addAction(scan_repository_action)
scan_ddb_action = QtWidgets.QAction("Scan device database", self.el)
def scan_ddb():
asyncio.ensure_future(device_db_ctl.scan())
scan_ddb_action.triggered.connect(scan_ddb)
@ -292,7 +295,7 @@ class ExplorerDock(QtWidgets.QDockWidget):
if expname is not None:
expurl = "repo:" + expname
self.d_shortcuts.set_shortcut(nr, expurl)
logger.info("Set shortcut F%d to '%s'", nr+1, expurl)
logger.info("Set shortcut F%d to '%s'", nr + 1, expurl)
def update_scanning(self, scanning):
if scanning:

View File

@ -0,0 +1,155 @@
import logging
import asyncio
from PyQt5 import QtCore, QtWidgets, QtGui
from artiq.gui.models import DictSyncModel
from artiq.gui.entries import EntryTreeWidget, procdesc_to_entry
from artiq.gui.tools import LayoutWidget
logger = logging.getLogger(__name__)
class Model(DictSyncModel):
def __init__(self, init):
DictSyncModel.__init__(self, ["RID", "Title", "Args"], init)
def convert(self, k, v, column):
if column == 0:
return k
elif column == 1:
txt = ": " + v["title"] if v["title"] != "" else ""
return str(k) + txt
elif column == 2:
return v["arglist_desc"]
else:
raise ValueError
def sort_key(self, k, v):
return k
class _InteractiveArgsRequest(EntryTreeWidget):
supplied = QtCore.pyqtSignal(int, dict)
cancelled = QtCore.pyqtSignal(int)
def __init__(self, rid, arglist_desc):
EntryTreeWidget.__init__(self)
self.rid = rid
self.arguments = dict()
for key, procdesc, group, tooltip in arglist_desc:
self.arguments[key] = {"desc": procdesc, "group": group, "tooltip": tooltip}
self.set_argument(key, self.arguments[key])
self.quickStyleClicked.connect(self.supply)
cancel_btn = QtWidgets.QPushButton("Cancel")
cancel_btn.setIcon(QtWidgets.QApplication.style().standardIcon(
QtWidgets.QStyle.SP_DialogCancelButton))
cancel_btn.clicked.connect(self.cancel)
supply_btn = QtWidgets.QPushButton("Supply")
supply_btn.setIcon(QtWidgets.QApplication.style().standardIcon(
QtWidgets.QStyle.SP_DialogOkButton))
supply_btn.clicked.connect(self.supply)
buttons = LayoutWidget()
buttons.addWidget(cancel_btn, 1, 1)
buttons.addWidget(supply_btn, 1, 2)
buttons.layout.setColumnStretch(0, 1)
buttons.layout.setColumnStretch(1, 0)
buttons.layout.setColumnStretch(2, 0)
buttons.layout.setColumnStretch(3, 1)
self.setItemWidget(self.bottom_item, 1, buttons)
def supply(self):
argument_values = dict()
for key, argument in self.arguments.items():
entry_cls = procdesc_to_entry(argument["desc"])
argument_values[key] = entry_cls.state_to_value(argument["state"])
self.supplied.emit(self.rid, argument_values)
def cancel(self):
self.cancelled.emit(self.rid)
class _InteractiveArgsView(QtWidgets.QStackedWidget):
supplied = QtCore.pyqtSignal(int, dict)
cancelled = QtCore.pyqtSignal(int)
def __init__(self):
QtWidgets.QStackedWidget.__init__(self)
self.tabs = QtWidgets.QTabWidget()
self.default_label = QtWidgets.QLabel("No pending interactive arguments requests.")
self.default_label.setAlignment(QtCore.Qt.AlignCenter)
font = QtGui.QFont(self.default_label.font())
font.setItalic(True)
self.default_label.setFont(font)
self.addWidget(self.tabs)
self.addWidget(self.default_label)
self.model = Model({})
def setModel(self, model):
self.setCurrentIndex(1)
for i in range(self.tabs.count()):
widget = self.tabs.widget(i)
self.tabs.removeTab(i)
widget.deleteLater()
self.model = model
self.model.rowsInserted.connect(self.rowsInserted)
self.model.rowsRemoved.connect(self.rowsRemoved)
for i in range(self.model.rowCount(QtCore.QModelIndex())):
self._insert_widget(i)
def _insert_widget(self, row):
rid = self.model.data(self.model.index(row, 0), QtCore.Qt.DisplayRole)
title = self.model.data(self.model.index(row, 1), QtCore.Qt.DisplayRole)
arglist_desc = self.model.data(self.model.index(row, 2), QtCore.Qt.DisplayRole)
inter_args_request = _InteractiveArgsRequest(rid, arglist_desc)
inter_args_request.supplied.connect(self.supplied)
inter_args_request.cancelled.connect(self.cancelled)
self.tabs.insertTab(row, inter_args_request, title)
def rowsInserted(self, parent, first, last):
assert first == last
self.setCurrentIndex(0)
self._insert_widget(first)
def rowsRemoved(self, parent, first, last):
assert first == last
widget = self.tabs.widget(first)
self.tabs.removeTab(first)
widget.deleteLater()
if self.tabs.count() == 0:
self.setCurrentIndex(1)
class InteractiveArgsDock(QtWidgets.QDockWidget):
def __init__(self, interactive_args_sub, interactive_args_rpc):
QtWidgets.QDockWidget.__init__(self, "Interactive Args")
self.setObjectName("Interactive Args")
self.setFeatures(
QtWidgets.QDockWidget.DockWidgetMovable | QtWidgets.QDockWidget.DockWidgetFloatable)
self.interactive_args_rpc = interactive_args_rpc
self.request_view = _InteractiveArgsView()
self.request_view.supplied.connect(self.supply)
self.request_view.cancelled.connect(self.cancel)
self.setWidget(self.request_view)
interactive_args_sub.add_setmodel_callback(self.request_view.setModel)
def supply(self, rid, values):
asyncio.ensure_future(self._supply_task(rid, values))
async def _supply_task(self, rid, values):
try:
await self.interactive_args_rpc.supply(rid, values)
except Exception:
logger.error("failed to supply interactive arguments for experiment: %d",
rid, exc_info=True)
def cancel(self, rid):
asyncio.ensure_future(self._cancel_task(rid))
async def _cancel_task(self, rid):
try:
await self.interactive_args_rpc.cancel(rid)
except Exception:
logger.error("failed to cancel interactive args request for experiment: %d",
rid, exc_info=True)

File diff suppressed because it is too large

View File

@ -16,8 +16,7 @@ class Model(DictSyncModel):
def __init__(self, init):
DictSyncModel.__init__(self,
["RID", "Pipeline", "Status", "Prio", "Due date",
"Revision", "File", "Class name"],
init)
"Revision", "File", "Class name"], init)
def sort_key(self, k, v):
# order by priority, and then by due date and RID
@ -96,14 +95,14 @@ class ScheduleDock(QtWidgets.QDockWidget):
cw = QtGui.QFontMetrics(self.font()).averageCharWidth()
h = self.table.horizontalHeader()
h.resizeSection(0, 7*cw)
h.resizeSection(1, 12*cw)
h.resizeSection(2, 16*cw)
h.resizeSection(3, 6*cw)
h.resizeSection(4, 16*cw)
h.resizeSection(5, 30*cw)
h.resizeSection(6, 20*cw)
h.resizeSection(7, 20*cw)
h.resizeSection(0, 7 * cw)
h.resizeSection(1, 12 * cw)
h.resizeSection(2, 16 * cw)
h.resizeSection(3, 6 * cw)
h.resizeSection(4, 16 * cw)
h.resizeSection(5, 30 * cw)
h.resizeSection(6, 20 * cw)
h.resizeSection(7, 20 * cw)
def set_model(self, model):
self.table_model = model
@ -151,7 +150,6 @@ class ScheduleDock(QtWidgets.QDockWidget):
rids.add(rid)
asyncio.ensure_future(self.request_term_multiple(rids))
def save_state(self):
return bytes(self.table.horizontalHeader().saveState())

View File

@ -3,8 +3,6 @@ from functools import partial
from PyQt5 import QtCore, QtWidgets
from artiq.gui.tools import LayoutWidget
logger = logging.getLogger(__name__)
@ -35,7 +33,7 @@ class ShortcutsDock(QtWidgets.QDockWidget):
for i in range(12):
row = i + 1
layout.addWidget(QtWidgets.QLabel("F" + str(i+1)), row, 0)
layout.addWidget(QtWidgets.QLabel("F" + str(i + 1)), row, 0)
label = QtWidgets.QLabel()
label.setSizePolicy(QtWidgets.QSizePolicy.Ignored,
@ -70,7 +68,7 @@ class ShortcutsDock(QtWidgets.QDockWidget):
"open": open,
"submit": submit
}
shortcut = QtWidgets.QShortcut("F" + str(i+1), main_window)
shortcut = QtWidgets.QShortcut("F" + str(i + 1), main_window)
shortcut.setContext(QtCore.Qt.ApplicationShortcut)
shortcut.activated.connect(partial(self._activated, i))

artiq/dashboard/waveform.py (new normal file, 914 lines)
View File

@ -0,0 +1,914 @@
import os
import asyncio
import logging
import bisect
import itertools
import math
from PyQt5 import QtCore, QtWidgets, QtGui
import pyqtgraph as pg
import numpy as np
from sipyco.pc_rpc import AsyncioClient
from sipyco import pyon
from artiq.tools import exc_to_warning, short_format
from artiq.coredevice import comm_analyzer
from artiq.coredevice.comm_analyzer import WaveformType
from artiq.gui.tools import LayoutWidget, get_open_file_name, get_save_file_name
from artiq.gui.models import DictSyncTreeSepModel
from artiq.gui.dndwidgets import VDragScrollArea, VDragDropSplitter
logger = logging.getLogger(__name__)
WAVEFORM_MIN_HEIGHT = 50
WAVEFORM_MAX_HEIGHT = 200
class ProxyClient():
def __init__(self, receive_cb, timeout=5, timer=5, timer_backoff=1.1):
self.receive_cb = receive_cb
self.receiver = None
self.addr = None
self.port_proxy = None
self.port = None
self._reconnect_event = asyncio.Event()
self.timeout = timeout
self.timer = timer
self.timer_cur = timer
self.timer_backoff = timer_backoff
self._reconnect_task = asyncio.ensure_future(self._reconnect())
def update_address(self, addr, port, port_proxy):
self.addr = addr
self.port = port
self.port_proxy = port_proxy
self._reconnect_event.set()
async def trigger_proxy_task(self):
remote = AsyncioClient()
try:
try:
if self.addr is None:
logger.error("missing core_analyzer host in device db")
return
await remote.connect_rpc(self.addr, self.port, "coreanalyzer_proxy_control")
except:
logger.error("error connecting to analyzer proxy control", exc_info=True)
return
await remote.trigger()
except:
logger.error("analyzer proxy reported failure", exc_info=True)
finally:
remote.close_rpc()
async def _reconnect(self):
while True:
await self._reconnect_event.wait()
self._reconnect_event.clear()
if self.receiver is not None:
await self.receiver.close()
self.receiver = None
new_receiver = comm_analyzer.AnalyzerProxyReceiver(
self.receive_cb, self.disconnect_cb)
try:
if self.addr is not None:
await asyncio.wait_for(new_receiver.connect(self.addr, self.port_proxy),
self.timeout)
logger.info("ARTIQ dashboard connected to analyzer proxy (%s)", self.addr)
self.timer_cur = self.timer
self.receiver = new_receiver
continue
except Exception:
logger.error("error connecting to analyzer proxy", exc_info=True)
try:
await asyncio.wait_for(self._reconnect_event.wait(), self.timer_cur)
except asyncio.TimeoutError:
self.timer_cur *= self.timer_backoff
self._reconnect_event.set()
else:
self.timer_cur = self.timer
async def close(self):
self._reconnect_task.cancel()
try:
await asyncio.wait_for(self._reconnect_task, None)
except asyncio.CancelledError:
pass
if self.receiver is not None:
await self.receiver.close()
def disconnect_cb(self):
logger.error("lost connection to analyzer proxy")
self._reconnect_event.set()
class _BackgroundItem(pg.GraphicsWidgetAnchor, pg.GraphicsWidget):
def __init__(self, parent, rect):
pg.GraphicsWidget.__init__(self, parent)
pg.GraphicsWidgetAnchor.__init__(self)
self.item = QtWidgets.QGraphicsRectItem(rect, self)
brush = QtGui.QBrush(QtGui.QColor(10, 10, 10, 140))
self.item.setBrush(brush)
class _BaseWaveform(pg.PlotWidget):
cursorMove = QtCore.pyqtSignal(float)
def __init__(self, name, width, precision, unit,
parent=None, pen="r", stepMode="right", connect="finite"):
pg.PlotWidget.__init__(self,
parent=parent,
x=None,
y=None,
pen=pen,
stepMode=stepMode,
connect=connect)
self.setMinimumHeight(WAVEFORM_MIN_HEIGHT)
self.setMaximumHeight(WAVEFORM_MAX_HEIGHT)
self.setMenuEnabled(False)
self.setContextMenuPolicy(QtCore.Qt.ActionsContextMenu)
self.name = name
self.width = width
self.precision = precision
self.unit = unit
self.x_data = []
self.y_data = []
self.plot_item = self.getPlotItem()
self.plot_item.hideButtons()
self.plot_item.hideAxis("top")
self.plot_item.getAxis("bottom").setStyle(showValues=False, tickLength=0)
self.plot_item.getAxis("left").setStyle(showValues=False, tickLength=0)
self.plot_item.setRange(yRange=(0, 1), padding=0.1)
self.plot_item.showGrid(x=True, y=True)
self.plot_data_item = self.plot_item.listDataItems()[0]
self.plot_data_item.setClipToView(True)
self.view_box = self.plot_item.getViewBox()
self.view_box.setMouseEnabled(x=True, y=False)
self.view_box.disableAutoRange(axis=pg.ViewBox.YAxis)
self.view_box.setLimits(xMin=0, minXRange=20)
self.title_label = pg.LabelItem(self.name, parent=self.plot_item)
self.title_label.anchor(itemPos=(0, 0), parentPos=(0, 0), offset=(0, 0))
self.title_label.setAttr('justify', 'left')
self.title_label.setZValue(10)
rect = self.title_label.boundingRect()
rect.setHeight(rect.height() * 2)
rect.setWidth(225)
self.label_bg = _BackgroundItem(parent=self.plot_item, rect=rect)
self.label_bg.anchor(itemPos=(0, 0), parentPos=(0, 0), offset=(0, 0))
self.cursor = pg.InfiniteLine()
self.cursor_y = None
self.addItem(self.cursor)
self.cursor_label = pg.LabelItem('', parent=self.plot_item)
self.cursor_label.anchor(itemPos=(0, 0), parentPos=(0, 0), offset=(0, 20))
self.cursor_label.setAttr('justify', 'left')
self.cursor_label.setZValue(10)
def setStoppedX(self, stopped_x):
self.stopped_x = stopped_x
self.view_box.setLimits(xMax=stopped_x)
def setData(self, data):
if len(data) == 0:
self.x_data, self.y_data = [], []
else:
self.x_data, self.y_data = zip(*data)
def onDataChange(self, data):
raise NotImplementedError
def onCursorMove(self, x):
self.cursor.setValue(x)
if len(self.x_data) < 1:
return
ind = bisect.bisect_left(self.x_data, x) - 1
dr = self.plot_data_item.dataRect()
self.cursor_y = None
if dr is not None and 0 <= ind < len(self.y_data):
self.cursor_y = self.y_data[ind]
def mouseMoveEvent(self, e):
if e.buttons() == QtCore.Qt.LeftButton \
and e.modifiers() == QtCore.Qt.ShiftModifier:
drag = QtGui.QDrag(self)
mime = QtCore.QMimeData()
drag.setMimeData(mime)
pixmapi = QtWidgets.QApplication.style().standardIcon(
QtWidgets.QStyle.SP_FileIcon)
drag.setPixmap(pixmapi.pixmap(32))
drag.exec_(QtCore.Qt.MoveAction)
else:
super().mouseMoveEvent(e)
def wheelEvent(self, e):
if e.modifiers() & QtCore.Qt.ControlModifier:
super().wheelEvent(e)
def mouseDoubleClickEvent(self, e):
pos = self.view_box.mapSceneToView(e.pos())
self.cursorMove.emit(pos.x())
class BitWaveform(_BaseWaveform):
def __init__(self, name, width, precision, unit, parent=None):
_BaseWaveform.__init__(self, name, width, precision, unit, parent)
self.plot_item.showGrid(x=True, y=False)
self._arrows = []
def onDataChange(self, data):
try:
self.setData(data)
for arw in self._arrows:
self.removeItem(arw)
self._arrows = []
l = len(data)
display_y = np.empty(l)
display_x = np.empty(l)
display_map = {
"X": 0.5,
"1": 1,
"0": 0
}
previous_y = None
for i, coord in enumerate(data):
x, y = coord
dis_y = display_map[y]
if previous_y == y:
arw = pg.ArrowItem(pxMode=True, angle=90)
self.addItem(arw)
self._arrows.append(arw)
arw.setPos(x, dis_y)
display_y[i] = dis_y
display_x[i] = x
previous_y = y
self.plot_data_item.setData(x=display_x, y=display_y)
except:
logger.error("Error when displaying waveform: %s", self.name, exc_info=True)
for arw in self._arrows:
self.removeItem(arw)
self.plot_data_item.setData(x=[], y=[])
def onCursorMove(self, x):
_BaseWaveform.onCursorMove(self, x)
if self.cursor_y is not None:
self.cursor_label.setText(self.cursor_y)
else:
self.cursor_label.setText("")
class AnalogWaveform(_BaseWaveform):
def __init__(self, name, width, precision, unit, parent=None):
_BaseWaveform.__init__(self, name, width, precision, unit, parent)
def onDataChange(self, data):
try:
self.setData(data)
self.plot_data_item.setData(x=self.x_data, y=self.y_data)
if len(data) > 0:
max_y = max(self.y_data)
min_y = min(self.y_data)
self.plot_item.setRange(yRange=(min_y, max_y), padding=0.1)
except:
logger.error("Error when displaying waveform: %s", self.name, exc_info=True)
self.plot_data_item.setData(x=[], y=[])
def onCursorMove(self, x):
_BaseWaveform.onCursorMove(self, x)
if self.cursor_y is not None:
t = short_format(self.cursor_y, {"precision": self.precision, "unit": self.unit})
else:
t = ""
self.cursor_label.setText(t)
class BitVectorWaveform(_BaseWaveform):
def __init__(self, name, width, precision, unit, parent=None):
_BaseWaveform.__init__(self, name, width, precision, unit, parent)
self._labels = []
self._format_string = "{:0=" + str(math.ceil(width / 4)) + "X}"
self.view_box.sigTransformChanged.connect(self._update_labels)
self.plot_item.showGrid(x=True, y=False)
def _update_labels(self):
for label in self._labels:
self.removeItem(label)
xmin, xmax = self.view_box.viewRange()[0]
left_label_i = bisect.bisect_left(self.x_data, xmin)
right_label_i = bisect.bisect_right(self.x_data, xmax) + 1
for i, j in itertools.pairwise(range(left_label_i, right_label_i)):
x1 = self.x_data[i]
x2 = self.x_data[j] if j < len(self.x_data) else self.stopped_x
lbl = self._labels[i]
bounds = lbl.boundingRect()
bounds_view = self.view_box.mapSceneToView(bounds)
if bounds_view.boundingRect().width() < x2 - x1:
self.addItem(lbl)
def onDataChange(self, data):
try:
self.setData(data)
for lbl in self._labels:
self.plot_item.removeItem(lbl)
self._labels = []
l = len(data)
display_x = np.empty(l * 2)
display_y = np.empty(l * 2)
for i, coord in enumerate(data):
x, y = coord
display_x[i * 2] = x
display_x[i * 2 + 1] = x
display_y[i * 2] = 0
display_y[i * 2 + 1] = int(int(y) != 0)
lbl = pg.TextItem(
self._format_string.format(int(y, 2)), anchor=(0, 0.5))
lbl.setPos(x, 0.5)
lbl.setTextWidth(100)
self._labels.append(lbl)
self.plot_data_item.setData(x=display_x, y=display_y)
except:
logger.error("Error when displaying waveform: %s", self.name, exc_info=True)
for lbl in self._labels:
self.plot_item.removeItem(lbl)
self.plot_data_item.setData(x=[], y=[])
def onCursorMove(self, x):
_BaseWaveform.onCursorMove(self, x)
if self.cursor_y is not None:
t = self._format_string.format(int(self.cursor_y, 2))
else:
t = ""
self.cursor_label.setText(t)
class LogWaveform(_BaseWaveform):
def __init__(self, name, width, precision, unit, parent=None):
_BaseWaveform.__init__(self, name, width, precision, unit, parent)
self.plot_data_item.opts['pen'] = None
self.plot_data_item.opts['symbol'] = 'x'
self._labels = []
self.plot_item.showGrid(x=True, y=False)
def onDataChange(self, data):
try:
self.setData(data)
for lbl in self._labels:
self.plot_item.removeItem(lbl)
self._labels = []
self.plot_data_item.setData(
x=self.x_data, y=np.ones(len(self.x_data)))
if len(data) == 0:
return
old_x = data[0][0]
old_msg = data[0][1]
for x, msg in data[1:]:
if x == old_x:
old_msg += "\n" + msg
else:
lbl = pg.TextItem(old_msg)
self.addItem(lbl)
self._labels.append(lbl)
lbl.setPos(old_x, 1)
old_msg = msg
old_x = x
lbl = pg.TextItem(old_msg)
self.addItem(lbl)
self._labels.append(lbl)
lbl.setPos(old_x, 1)
except:
logger.error("Error when displaying waveform: %s", self.name, exc_info=True)
for lbl in self._labels:
self.plot_item.removeItem(lbl)
self.plot_data_item.setData(x=[], y=[])
class _WaveformView(QtWidgets.QWidget):
cursorMove = QtCore.pyqtSignal(float)
def __init__(self, parent):
QtWidgets.QWidget.__init__(self, parent=parent)
self._stopped_x = None
self._timescale = 1
self._cursor_x = 0
layout = QtWidgets.QVBoxLayout()
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(0)
self.setLayout(layout)
self._ref_axis = pg.PlotWidget()
self._ref_axis.hideAxis("bottom")
self._ref_axis.hideAxis("left")
self._ref_axis.hideButtons()
self._ref_axis.setFixedHeight(45)
self._ref_axis.setMenuEnabled(False)
self._top = pg.AxisItem("top")
self._top.setScale(1e-12)
self._top.setLabel(units="s")
self._ref_axis.setAxisItems({"top": self._top})
layout.addWidget(self._ref_axis)
self._ref_vb = self._ref_axis.getPlotItem().getViewBox()
self._ref_vb.setFixedHeight(0)
self._ref_vb.setMouseEnabled(x=True, y=False)
self._ref_vb.setLimits(xMin=0)
scroll_area = VDragScrollArea(self)
scroll_area.setWidgetResizable(True)
scroll_area.setContentsMargins(0, 0, 0, 0)
scroll_area.setFrameShape(QtWidgets.QFrame.NoFrame)
scroll_area.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
layout.addWidget(scroll_area)
self._splitter = VDragDropSplitter(parent=scroll_area)
self._splitter.setHandleWidth(1)
scroll_area.setWidget(self._splitter)
self.cursorMove.connect(self.onCursorMove)
self.confirm_delete_dialog = QtWidgets.QMessageBox(self)
self.confirm_delete_dialog.setIcon(
QtWidgets.QMessageBox.Icon.Warning
)
self.confirm_delete_dialog.setText("Delete all waveforms?")
self.confirm_delete_dialog.setStandardButtons(
QtWidgets.QMessageBox.Ok | QtWidgets.QMessageBox.Cancel
)
self.confirm_delete_dialog.setDefaultButton(
QtWidgets.QMessageBox.Ok
)
def setModel(self, model):
self._model = model
self._model.dataChanged.connect(self.onDataChange)
self._model.rowsInserted.connect(self.onInsert)
self._model.rowsRemoved.connect(self.onRemove)
self._model.rowsMoved.connect(self.onMove)
self._splitter.dropped.connect(self._model.move)
self.confirm_delete_dialog.accepted.connect(self._model.clear)
def setTimescale(self, timescale):
self._timescale = timescale
self._top.setScale(1e-12 * timescale)
def setStoppedX(self, stopped_x):
self._stopped_x = stopped_x
self._ref_vb.setLimits(xMax=stopped_x)
self._ref_vb.setRange(xRange=(0, stopped_x))
for i in range(self._model.rowCount()):
self._splitter.widget(i).setStoppedX(stopped_x)
def resetZoom(self):
if self._stopped_x is not None:
self._ref_vb.setRange(xRange=(0, self._stopped_x))
def onDataChange(self, top, bottom, roles):
self.cursorMove.emit(0)
first = top.row()
last = bottom.row()
data_row = self._model.headers.index("data")
for i in range(first, last + 1):
data = self._model.data(self._model.index(i, data_row))
self._splitter.widget(i).onDataChange(data)
def onInsert(self, parent, first, last):
for i in range(first, last + 1):
w = self._create_waveform(i)
self._splitter.insertWidget(i, w)
self._resize()
def onRemove(self, parent, first, last):
for i in reversed(range(first, last + 1)):
w = self._splitter.widget(i)
w.deleteLater()
self._splitter.refresh()
self._resize()
def onMove(self, src_parent, src_start, src_end, dest_parent, dest_row):
w = self._splitter.widget(src_start)
self._splitter.insertWidget(dest_row, w)
def onCursorMove(self, x):
self._cursor_x = x
for i in range(self._model.rowCount()):
self._splitter.widget(i).onCursorMove(x)
def _create_waveform(self, row):
name, ty, width, precision, unit = (
self._model.data(self._model.index(row, i)) for i in range(5))
waveform_cls = {
WaveformType.BIT: BitWaveform,
WaveformType.VECTOR: BitVectorWaveform,
WaveformType.ANALOG: AnalogWaveform,
WaveformType.LOG: LogWaveform
}[ty]
w = waveform_cls(name, width, precision, unit, parent=self._splitter)
w.setXLink(self._ref_vb)
w.setStoppedX(self._stopped_x)
w.cursorMove.connect(self.cursorMove)
w.onCursorMove(self._cursor_x)
action = QtWidgets.QAction("Delete waveform", w)
action.triggered.connect(lambda: self._delete_waveform(w))
w.addAction(action)
action = QtWidgets.QAction("Delete all waveforms", w)
action.triggered.connect(self.confirm_delete_dialog.open)
w.addAction(action)
return w
def _delete_waveform(self, waveform):
row = self._splitter.indexOf(waveform)
self._model.pop(row)
def _resize(self):
self._splitter.setFixedHeight(
int((WAVEFORM_MIN_HEIGHT + WAVEFORM_MAX_HEIGHT) * self._model.rowCount() / 2))
class _WaveformModel(QtCore.QAbstractTableModel):
def __init__(self):
self.backing_struct = []
self.headers = ["name", "type", "width", "precision", "unit", "data"]
QtCore.QAbstractTableModel.__init__(self)
def rowCount(self, parent=QtCore.QModelIndex()):
return len(self.backing_struct)
def columnCount(self, parent=QtCore.QModelIndex()):
return len(self.headers)
def data(self, index, role=QtCore.Qt.DisplayRole):
if index.isValid():
return self.backing_struct[index.row()][index.column()]
return None
def extend(self, data):
length = len(self.backing_struct)
len_data = len(data)
self.beginInsertRows(QtCore.QModelIndex(), length, length + len_data - 1)
self.backing_struct.extend(data)
self.endInsertRows()
def pop(self, row):
self.beginRemoveRows(QtCore.QModelIndex(), row, row)
self.backing_struct.pop(row)
self.endRemoveRows()
def move(self, src, dest):
if src == dest:
return
if src < dest:
dest, src = src, dest
self.beginMoveRows(QtCore.QModelIndex(), src, src, QtCore.QModelIndex(), dest)
self.backing_struct.insert(dest, self.backing_struct.pop(src))
self.endMoveRows()
def clear(self):
self.beginRemoveRows(QtCore.QModelIndex(), 0, len(self.backing_struct) - 1)
self.backing_struct.clear()
self.endRemoveRows()
def export_list(self):
return [[row[0], row[1].value, *row[2:5]] for row in self.backing_struct]
def import_list(self, channel_list):
self.clear()
data = [[row[0], WaveformType(row[1]), *row[2:5], []] for row in channel_list]
self.extend(data)
def update_data(self, waveform_data, top, bottom):
name_col = self.headers.index("name")
data_col = self.headers.index("data")
for i in range(top, bottom):
name = self.data(self.index(i, name_col))
self.backing_struct[i][data_col] = waveform_data.get(name, [])
self.dataChanged.emit(self.index(i, data_col),
self.index(i, data_col))
def update_all(self, waveform_data):
self.update_data(waveform_data, 0, self.rowCount())
class _CursorTimeControl(QtWidgets.QLineEdit):
submit = QtCore.pyqtSignal(float)
def __init__(self, parent):
QtWidgets.QLineEdit.__init__(self, parent=parent)
self._text = ""
self._value = 0
self._timescale = 1
self.setDisplayValue(0)
self.textChanged.connect(self._onTextChange)
self.returnPressed.connect(self._onReturnPress)
def setTimescale(self, timescale):
self._timescale = timescale
def _onTextChange(self, text):
self._text = text
def setDisplayValue(self, value):
self._value = value
self._text = pg.siFormat(value * 1e-12 * self._timescale,
suffix="s",
allowUnicode=False,
precision=15)
self.setText(self._text)
def _setValueFromText(self, text):
try:
self._value = pg.siEval(text) * (1e12 / self._timescale)
except:
logger.error("Error when parsing cursor time input", exc_info=True)
def _onReturnPress(self):
self._setValueFromText(self._text)
self.setDisplayValue(self._value)
self.submit.emit(self._value)
self.clearFocus()
class Model(DictSyncTreeSepModel):
def __init__(self, init):
DictSyncTreeSepModel.__init__(self, "/", ["Channels"], init)
def clear(self):
for k in self.backing_store:
self._del_item(self, k.split(self.separator))
self.backing_store.clear()
def update(self, d):
for k, v in d.items():
self[k] = v
class _AddChannelDialog(QtWidgets.QDialog):
def __init__(self, parent, model):
QtWidgets.QDialog.__init__(self, parent=parent)
self.setContextMenuPolicy(QtCore.Qt.ActionsContextMenu)
self.setWindowTitle("Add channels")
layout = QtWidgets.QVBoxLayout()
self.setLayout(layout)
self._model = model
self._tree_view = QtWidgets.QTreeView()
self._tree_view.setHeaderHidden(True)
self._tree_view.setSelectionBehavior(
QtWidgets.QAbstractItemView.SelectItems)
self._tree_view.setSelectionMode(
QtWidgets.QAbstractItemView.ExtendedSelection)
self._tree_view.setModel(self._model)
layout.addWidget(self._tree_view)
self._button_box = QtWidgets.QDialogButtonBox(
QtWidgets.QDialogButtonBox.Ok | QtWidgets.QDialogButtonBox.Cancel
)
self._button_box.setCenterButtons(True)
self._button_box.accepted.connect(self.add_channels)
self._button_box.rejected.connect(self.reject)
layout.addWidget(self._button_box)
def add_channels(self):
selection = self._tree_view.selectedIndexes()
channels = []
for select in selection:
key = self._model.index_to_key(select)
if key is not None:
channels.append([key, *self._model[key].ref, []])
self.channels = channels
self.accept()
class WaveformDock(QtWidgets.QDockWidget):
def __init__(self, timeout, timer, timer_backoff):
QtWidgets.QDockWidget.__init__(self, "Waveform")
self.setObjectName("Waveform")
self.setFeatures(
QtWidgets.QDockWidget.DockWidgetMovable | QtWidgets.QDockWidget.DockWidgetFloatable)
self._channel_model = Model({})
self._waveform_model = _WaveformModel()
self._ddb = None
self._dump = None
self._waveform_data = {
"timescale": 1,
"stopped_x": None,
"logs": dict(),
"data": dict(),
}
self._current_dir = os.getcwd()
self.proxy_client = ProxyClient(self.on_dump_receive,
timeout,
timer,
timer_backoff)
grid = LayoutWidget()
self.setWidget(grid)
self._menu_btn = QtWidgets.QPushButton()
self._menu_btn.setIcon(
QtWidgets.QApplication.style().standardIcon(
QtWidgets.QStyle.SP_FileDialogStart))
grid.addWidget(self._menu_btn, 0, 0)
self._request_dump_btn = QtWidgets.QToolButton()
self._request_dump_btn.setToolTip("Fetch analyzer data from device")
self._request_dump_btn.setIcon(
QtWidgets.QApplication.style().standardIcon(
QtWidgets.QStyle.SP_BrowserReload))
self._request_dump_btn.clicked.connect(
lambda: asyncio.ensure_future(exc_to_warning(self.proxy_client.trigger_proxy_task())))
grid.addWidget(self._request_dump_btn, 0, 1)
self._add_channel_dialog = _AddChannelDialog(self, self._channel_model)
self._add_channel_dialog.accepted.connect(self._add_channels)
self._add_btn = QtWidgets.QToolButton()
self._add_btn.setToolTip("Add channels...")
self._add_btn.setIcon(
QtWidgets.QApplication.style().standardIcon(
QtWidgets.QStyle.SP_FileDialogListView))
self._add_btn.clicked.connect(self._add_channel_dialog.open)
grid.addWidget(self._add_btn, 0, 2)
self._file_menu = QtWidgets.QMenu()
self._add_async_action("Open trace...", self.load_trace)
self._add_async_action("Save trace...", self.save_trace)
self._add_async_action("Save trace as VCD...", self.save_vcd)
self._add_async_action("Open channel list...", self.load_channels)
self._add_async_action("Save channel list...", self.save_channels)
self._menu_btn.setMenu(self._file_menu)
self._waveform_view = _WaveformView(self)
self._waveform_view.setModel(self._waveform_model)
grid.addWidget(self._waveform_view, 1, 0, colspan=12)
self._reset_zoom_btn = QtWidgets.QToolButton()
self._reset_zoom_btn.setToolTip("Reset zoom")
self._reset_zoom_btn.setIcon(
QtWidgets.QApplication.style().standardIcon(
QtWidgets.QStyle.SP_TitleBarMaxButton))
self._reset_zoom_btn.clicked.connect(self._waveform_view.resetZoom)
grid.addWidget(self._reset_zoom_btn, 0, 3)
self._cursor_control = _CursorTimeControl(self)
self._waveform_view.cursorMove.connect(self._cursor_control.setDisplayValue)
self._cursor_control.submit.connect(self._waveform_view.onCursorMove)
grid.addWidget(self._cursor_control, 0, 4, colspan=6)
def _add_async_action(self, label, coro):
action = QtWidgets.QAction(label, self)
action.triggered.connect(
lambda: asyncio.ensure_future(exc_to_warning(coro())))
self._file_menu.addAction(action)
def _add_channels(self):
channels = self._add_channel_dialog.channels
count = self._waveform_model.rowCount()
self._waveform_model.extend(channels)
self._waveform_model.update_data(self._waveform_data['data'],
count,
count + len(channels))
def on_dump_receive(self, dump):
self._dump = dump
decoded_dump = comm_analyzer.decode_dump(dump)
waveform_data = comm_analyzer.decoded_dump_to_waveform_data(self._ddb, decoded_dump)
self._waveform_data.update(waveform_data)
self._channel_model.update(self._waveform_data['logs'])
self._waveform_model.update_all(self._waveform_data['data'])
self._waveform_view.setStoppedX(self._waveform_data['stopped_x'])
self._waveform_view.setTimescale(self._waveform_data['timescale'])
self._cursor_control.setTimescale(self._waveform_data['timescale'])
async def load_trace(self):
try:
filename = await get_open_file_name(
self,
"Load Analyzer Trace",
self._current_dir,
"All files (*.*)")
except asyncio.CancelledError:
return
self._current_dir = os.path.dirname(filename)
try:
with open(filename, 'rb') as f:
dump = f.read()
self.on_dump_receive(dump)
except:
logger.error("Failed to open analyzer trace", exc_info=True)
async def save_trace(self):
if self._dump is None:
logger.error("No analyzer trace stored in dashboard, "
"try loading from file or fetching from device")
return
try:
filename = await get_save_file_name(
self,
"Save Analyzer Trace",
self._current_dir,
"All files (*.*)")
except asyncio.CancelledError:
return
self._current_dir = os.path.dirname(filename)
try:
with open(filename, 'wb') as f:
f.write(self._dump)
except:
logger.error("Failed to save analyzer trace", exc_info=True)
async def save_vcd(self):
if self._dump is None:
logger.error("No analyzer trace stored in dashboard, "
"try loading from file or fetching from device")
return
try:
filename = await get_save_file_name(
self,
"Save VCD",
self._current_dir,
"All files (*.*)")
except asyncio.CancelledError:
return
self._current_dir = os.path.dirname(filename)
try:
decoded_dump = comm_analyzer.decode_dump(self._dump)
with open(filename, 'w') as f:
comm_analyzer.decoded_dump_to_vcd(f, self._ddb, decoded_dump)
except:
logger.error("Failed to save trace as VCD", exc_info=True)
async def load_channels(self):
try:
filename = await get_open_file_name(
self,
"Open channel list",
self._current_dir,
"PYON files (*.pyon);;All files (*.*)")
except asyncio.CancelledError:
return
self._current_dir = os.path.dirname(filename)
try:
channel_list = pyon.load_file(filename)
self._waveform_model.import_list(channel_list)
self._waveform_model.update_all(self._waveform_data['data'])
except:
logger.error("Failed to open channel list", exc_info=True)
async def save_channels(self):
try:
filename = await get_save_file_name(
self,
"Save channel list",
self._current_dir,
"PYON files (*.pyon);;All files (*.*)")
except asyncio.CancelledError:
return
self._current_dir = os.path.dirname(filename)
try:
channel_list = self._waveform_model.export_list()
pyon.store_file(filename, channel_list)
except:
logger.error("Failed to save channel list", exc_info=True)
def _process_ddb(self):
channel_list = comm_analyzer.get_channel_list(self._ddb)
self._channel_model.clear()
self._channel_model.update(channel_list)
desc = self._ddb.get("core_analyzer")
if desc is not None:
addr = desc["host"]
port_proxy = desc.get("port_proxy", 1385)
port = desc.get("port", 1386)
self.proxy_client.update_address(addr, port, port_proxy)
else:
self.proxy_client.update_address(None, None, None)
def init_ddb(self, ddb):
self._ddb = ddb
self._process_ddb()
return ddb
def notify_ddb(self, mod):
self._process_ddb()
async def stop(self):
if self.proxy_client is not None:
await self.proxy_client.close()

View File

@ -127,7 +127,7 @@
"# let's connect to the master\n",
"\n",
"schedule, exps, datasets = [\n",
" Client(\"::1\", 3251, \"master_\" + i) for i in\n",
" Client(\"::1\", 3251, i) for i in\n",
" \"schedule experiment_db dataset_db\".split()]\n",
"\n",
"print(\"current schedule\")\n",

View File

@ -0,0 +1,34 @@
from artiq.experiment import *
class InteractiveDemo(EnvExperiment):
def build(self):
pass
def run(self):
print("Waiting for user input...")
with self.interactive(title="Interactive Demo") as interactive:
interactive.setattr_argument("pyon_value",
PYONValue(self.get_dataset("foo", default=42)))
interactive.setattr_argument("number", NumberValue(42e-6,
unit="us",
precision=4))
interactive.setattr_argument("integer", NumberValue(42,
step=1, precision=0))
interactive.setattr_argument("string", StringValue("Hello World"))
interactive.setattr_argument("scan", Scannable(global_max=400,
default=NoScan(325),
precision=6))
interactive.setattr_argument("boolean", BooleanValue(True), "Group")
interactive.setattr_argument("enum",
EnumerationValue(["foo", "bar", "quux"], "foo"),
"Group")
print("Done! Values:")
print(interactive.pyon_value)
print(interactive.boolean)
print(interactive.enum)
print(interactive.number, type(interactive.number))
print(interactive.integer, type(interactive.integer))
print(interactive.string)
for i in interactive.scan:
print(i)

View File

@ -500,7 +500,7 @@ pub extern fn main() -> i32 {
println!(r"|_| |_|_|____/ \___/ \____|");
println!("");
println!("MiSoC Bootloader");
println!("Copyright (c) 2017-2023 M-Labs Limited");
println!("Copyright (c) 2017-2024 M-Labs Limited");
println!("");
#[cfg(has_ethmac)]

View File

@ -453,15 +453,42 @@ extern fn dma_playback(timestamp: i64, ptr: i32, _uses_ddma: bool) {
}
}
#[cfg(not(kernel_has_rtio_dma))]
#[cfg(all(not(kernel_has_rtio_dma), not(has_rtio_dma)))]
#[unwind(allowed)]
extern fn dma_playback(_timestamp: i64, _ptr: i32, _uses_ddma: bool) {
unimplemented!("not(kernel_has_rtio_dma)")
}
// for satellite (has_rtio_dma but not in kernel)
#[cfg(all(not(kernel_has_rtio_dma), has_rtio_dma))]
#[unwind(allowed)]
extern fn subkernel_load_run(id: u32, run: bool) {
send(&SubkernelLoadRunRequest { id: id, run: run });
extern fn dma_playback(timestamp: i64, ptr: i32, _uses_ddma: bool) {
// DDMA is always used on satellites, so the `uses_ddma` setting is ignored
// StartRemoteRequest reused as "normal" start request
send(&DmaStartRemoteRequest { id: ptr as i32, timestamp: timestamp });
// skip awaitremoterequest - it's a given
recv!(&DmaAwaitRemoteReply { timeout, error, channel, timestamp } => {
if timeout {
raise!("DMAError",
"Error running DMA on satellite device, timed out waiting for results");
}
if error & 1 != 0 {
raise!("RTIOUnderflow",
"RTIO underflow at channel {rtio_channel_info:0}, {1} mu",
channel as i64, timestamp as i64, 0);
}
if error & 2 != 0 {
raise!("RTIODestinationUnreachable",
"RTIO destination unreachable, output, at channel {rtio_channel_info:0}, {1} mu",
channel as i64, timestamp as i64, 0);
}
});
}
#[unwind(allowed)]
extern fn subkernel_load_run(id: u32, destination: u8, run: bool) {
send(&SubkernelLoadRunRequest { id: id, destination: destination, run: run });
recv!(&SubkernelLoadRunReply { succeeded } => {
if !succeeded {
raise!("SubkernelError",
@ -471,7 +498,7 @@ extern fn subkernel_load_run(id: u32, run: bool) {
}
#[unwind(allowed)]
extern fn subkernel_await_finish(id: u32, timeout: u64) {
extern fn subkernel_await_finish(id: u32, timeout: i64) {
send(&SubkernelAwaitFinishRequest { id: id, timeout: timeout });
recv!(SubkernelAwaitFinishReply { status } => {
match status {
@ -489,9 +516,11 @@ extern fn subkernel_await_finish(id: u32, timeout: u64) {
}
#[unwind(aborts)]
extern fn subkernel_send_message(id: u32, count: u8, tag: &CSlice<u8>, data: *const *const ()) {
extern fn subkernel_send_message(id: u32, is_return: bool, destination: u8,
count: u8, tag: &CSlice<u8>, data: *const *const ()) {
send(&SubkernelMsgSend {
id: id,
destination: if is_return { None } else { Some(destination) },
count: count,
tag: tag.as_ref(),
data: data
@ -499,7 +528,7 @@ extern fn subkernel_send_message(id: u32, count: u8, tag: &CSlice<u8>, data: *co
}
#[unwind(allowed)]
extern fn subkernel_await_message(id: u32, timeout: u64, tags: &CSlice<u8>, min: u8, max: u8) -> u8 {
extern fn subkernel_await_message(id: i32, timeout: i64, tags: &CSlice<u8>, min: u8, max: u8) -> u8 {
send(&SubkernelMsgRecvRequest { id: id, timeout: timeout, tags: tags.as_ref() });
recv!(SubkernelMsgRecvReply { status, count } => {
match status {

View File

@ -18,7 +18,7 @@ impl<T> From<IoError<T>> for Error<T> {
// used by satellite -> master analyzer, subkernel exceptions
pub const SAT_PAYLOAD_MAX_SIZE: usize = /*max size*/512 - /*CRC*/4 - /*packet ID*/1 - /*last*/1 - /*length*/2;
// used by DDMA, subkernel program data (need to provide extra ID and destination)
pub const MASTER_PAYLOAD_MAX_SIZE: usize = SAT_PAYLOAD_MAX_SIZE - /*destination*/1 - /*ID*/4;
pub const MASTER_PAYLOAD_MAX_SIZE: usize = SAT_PAYLOAD_MAX_SIZE - /*source*/1 - /*destination*/1 - /*ID*/4;
#[derive(PartialEq, Clone, Copy, Debug)]
#[repr(u8)]
@ -77,6 +77,8 @@ pub enum Packet {
RoutingSetPath { destination: u8, hops: [u8; 32] },
RoutingSetRank { rank: u8 },
RoutingRetrievePackets,
RoutingNoPackets,
RoutingAck,
MonitorRequest { destination: u8, channel: u16, probe: u8 },
@ -106,22 +108,26 @@ pub enum Packet {
AnalyzerDataRequest { destination: u8 },
AnalyzerData { last: bool, length: u16, data: [u8; SAT_PAYLOAD_MAX_SIZE]},
DmaAddTraceRequest { destination: u8, id: u32, status: PayloadStatus, length: u16, trace: [u8; MASTER_PAYLOAD_MAX_SIZE] },
DmaAddTraceReply { succeeded: bool },
DmaRemoveTraceRequest { destination: u8, id: u32 },
DmaRemoveTraceReply { succeeded: bool },
DmaPlaybackRequest { destination: u8, id: u32, timestamp: u64 },
DmaPlaybackReply { succeeded: bool },
DmaPlaybackStatus { destination: u8, id: u32, error: u8, channel: u32, timestamp: u64 },
DmaAddTraceRequest {
source: u8, destination: u8,
id: u32, status: PayloadStatus,
length: u16, trace: [u8; MASTER_PAYLOAD_MAX_SIZE]
},
DmaAddTraceReply { source: u8, destination: u8, id: u32, succeeded: bool },
DmaRemoveTraceRequest { source: u8, destination: u8, id: u32 },
DmaRemoveTraceReply { destination: u8, succeeded: bool },
DmaPlaybackRequest { source: u8, destination: u8, id: u32, timestamp: u64 },
DmaPlaybackReply { destination: u8, succeeded: bool },
DmaPlaybackStatus { source: u8, destination: u8, id: u32, error: u8, channel: u32, timestamp: u64 },
SubkernelAddDataRequest { destination: u8, id: u32, status: PayloadStatus, length: u16, data: [u8; MASTER_PAYLOAD_MAX_SIZE] },
SubkernelAddDataReply { succeeded: bool },
SubkernelLoadRunRequest { destination: u8, id: u32, run: bool },
SubkernelLoadRunReply { succeeded: bool },
SubkernelFinished { id: u32, with_exception: bool },
SubkernelLoadRunRequest { source: u8, destination: u8, id: u32, run: bool },
SubkernelLoadRunReply { destination: u8, succeeded: bool },
SubkernelFinished { destination: u8, id: u32, with_exception: bool, exception_src: u8 },
SubkernelExceptionRequest { destination: u8 },
SubkernelException { last: bool, length: u16, data: [u8; SAT_PAYLOAD_MAX_SIZE] },
SubkernelMessage { destination: u8, id: u32, status: PayloadStatus, length: u16, data: [u8; MASTER_PAYLOAD_MAX_SIZE] },
SubkernelMessage { source: u8, destination: u8, id: u32, status: PayloadStatus, length: u16, data: [u8; MASTER_PAYLOAD_MAX_SIZE] },
SubkernelMessageAck { destination: u8 },
}
@ -164,6 +170,8 @@ impl Packet {
rank: reader.read_u8()?
},
0x32 => Packet::RoutingAck,
0x33 => Packet::RoutingRetrievePackets,
0x34 => Packet::RoutingNoPackets,
0x40 => Packet::MonitorRequest {
destination: reader.read_u8()?,
@ -278,6 +286,7 @@ impl Packet {
},
0xb0 => {
let source = reader.read_u8()?;
let destination = reader.read_u8()?;
let id = reader.read_u32()?;
let status = reader.read_u8()?;
@ -285,6 +294,7 @@ impl Packet {
let mut trace: [u8; MASTER_PAYLOAD_MAX_SIZE] = [0; MASTER_PAYLOAD_MAX_SIZE];
reader.read_exact(&mut trace[0..length as usize])?;
Packet::DmaAddTraceRequest {
source: source,
destination: destination,
id: id,
status: PayloadStatus::from(status),
@ -293,24 +303,32 @@ impl Packet {
}
},
0xb1 => Packet::DmaAddTraceReply {
source: reader.read_u8()?,
destination: reader.read_u8()?,
id: reader.read_u32()?,
succeeded: reader.read_bool()?
},
0xb2 => Packet::DmaRemoveTraceRequest {
source: reader.read_u8()?,
destination: reader.read_u8()?,
id: reader.read_u32()?
},
0xb3 => Packet::DmaRemoveTraceReply {
destination: reader.read_u8()?,
succeeded: reader.read_bool()?
},
0xb4 => Packet::DmaPlaybackRequest {
source: reader.read_u8()?,
destination: reader.read_u8()?,
id: reader.read_u32()?,
timestamp: reader.read_u64()?
},
0xb5 => Packet::DmaPlaybackReply {
destination: reader.read_u8()?,
succeeded: reader.read_bool()?
},
0xb6 => Packet::DmaPlaybackStatus {
source: reader.read_u8()?,
destination: reader.read_u8()?,
id: reader.read_u32()?,
error: reader.read_u8()?,
@ -337,16 +355,20 @@ impl Packet {
succeeded: reader.read_bool()?
},
0xc4 => Packet::SubkernelLoadRunRequest {
source: reader.read_u8()?,
destination: reader.read_u8()?,
id: reader.read_u32()?,
run: reader.read_bool()?
},
0xc5 => Packet::SubkernelLoadRunReply {
destination: reader.read_u8()?,
succeeded: reader.read_bool()?
},
0xc8 => Packet::SubkernelFinished {
destination: reader.read_u8()?,
id: reader.read_u32()?,
with_exception: reader.read_bool()?,
exception_src: reader.read_u8()?
},
0xc9 => Packet::SubkernelExceptionRequest {
destination: reader.read_u8()?
@ -363,6 +385,7 @@ impl Packet {
}
},
0xcb => {
let source = reader.read_u8()?;
let destination = reader.read_u8()?;
let id = reader.read_u32()?;
let status = reader.read_u8()?;
@ -370,6 +393,7 @@ impl Packet {
let mut data: [u8; MASTER_PAYLOAD_MAX_SIZE] = [0; MASTER_PAYLOAD_MAX_SIZE];
reader.read_exact(&mut data[0..length as usize])?;
Packet::SubkernelMessage {
source: source,
destination: destination,
id: id,
status: PayloadStatus::from(status),
@ -432,6 +456,10 @@ impl Packet {
},
Packet::RoutingAck =>
writer.write_u8(0x32)?,
Packet::RoutingRetrievePackets =>
writer.write_u8(0x33)?,
Packet::RoutingNoPackets =>
writer.write_u8(0x34)?,
Packet::MonitorRequest { destination, channel, probe } => {
writer.write_u8(0x40)?;
@ -561,8 +589,9 @@ impl Packet {
writer.write_all(&data[0..length as usize])?;
},
Packet::DmaAddTraceRequest { destination, id, status, trace, length } => {
Packet::DmaAddTraceRequest { source, destination, id, status, trace, length } => {
writer.write_u8(0xb0)?;
writer.write_u8(source)?;
writer.write_u8(destination)?;
writer.write_u32(id)?;
writer.write_u8(status as u8)?;
@ -571,31 +600,39 @@ impl Packet {
writer.write_u16(length)?;
writer.write_all(&trace[0..length as usize])?;
},
Packet::DmaAddTraceReply { succeeded } => {
Packet::DmaAddTraceReply { source, destination, id, succeeded } => {
writer.write_u8(0xb1)?;
writer.write_u8(source)?;
writer.write_u8(destination)?;
writer.write_u32(id)?;
writer.write_bool(succeeded)?;
},
Packet::DmaRemoveTraceRequest { destination, id } => {
Packet::DmaRemoveTraceRequest { source, destination, id } => {
writer.write_u8(0xb2)?;
writer.write_u8(source)?;
writer.write_u8(destination)?;
writer.write_u32(id)?;
},
Packet::DmaRemoveTraceReply { succeeded } => {
Packet::DmaRemoveTraceReply { destination, succeeded } => {
writer.write_u8(0xb3)?;
writer.write_u8(destination)?;
writer.write_bool(succeeded)?;
},
Packet::DmaPlaybackRequest { destination, id, timestamp } => {
Packet::DmaPlaybackRequest { source, destination, id, timestamp } => {
writer.write_u8(0xb4)?;
writer.write_u8(source)?;
writer.write_u8(destination)?;
writer.write_u32(id)?;
writer.write_u64(timestamp)?;
},
Packet::DmaPlaybackReply { succeeded } => {
Packet::DmaPlaybackReply { destination, succeeded } => {
writer.write_u8(0xb5)?;
writer.write_u8(destination)?;
writer.write_bool(succeeded)?;
},
Packet::DmaPlaybackStatus { destination, id, error, channel, timestamp } => {
Packet::DmaPlaybackStatus { source, destination, id, error, channel, timestamp } => {
writer.write_u8(0xb6)?;
writer.write_u8(source)?;
writer.write_u8(destination)?;
writer.write_u32(id)?;
writer.write_u8(error)?;
@ -615,20 +652,24 @@ impl Packet {
writer.write_u8(0xc1)?;
writer.write_bool(succeeded)?;
},
Packet::SubkernelLoadRunRequest { destination, id, run } => {
Packet::SubkernelLoadRunRequest { source, destination, id, run } => {
writer.write_u8(0xc4)?;
writer.write_u8(source)?;
writer.write_u8(destination)?;
writer.write_u32(id)?;
writer.write_bool(run)?;
},
Packet::SubkernelLoadRunReply { succeeded } => {
Packet::SubkernelLoadRunReply { destination, succeeded } => {
writer.write_u8(0xc5)?;
writer.write_u8(destination)?;
writer.write_bool(succeeded)?;
},
Packet::SubkernelFinished { id, with_exception } => {
Packet::SubkernelFinished { destination, id, with_exception, exception_src } => {
writer.write_u8(0xc8)?;
writer.write_u8(destination)?;
writer.write_u32(id)?;
writer.write_bool(with_exception)?;
writer.write_u8(exception_src)?;
},
Packet::SubkernelExceptionRequest { destination } => {
writer.write_u8(0xc9)?;
@ -640,8 +681,9 @@ impl Packet {
writer.write_u16(length)?;
writer.write_all(&data[0..length as usize])?;
},
Packet::SubkernelMessage { destination, id, status, data, length } => {
Packet::SubkernelMessage { source, destination, id, status, data, length } => {
writer.write_u8(0xcb)?;
writer.write_u8(source)?;
writer.write_u8(destination)?;
writer.write_u32(id)?;
writer.write_u8(status as u8)?;
@ -655,4 +697,36 @@ impl Packet {
}
Ok(())
}
pub fn routable_destination(&self) -> Option<u8> {
// only for packets that may need to be re-routed, not merely forwarded
match self {
Packet::DmaAddTraceRequest { destination, .. } => Some(*destination),
Packet::DmaAddTraceReply { destination, .. } => Some(*destination),
Packet::DmaRemoveTraceRequest { destination, .. } => Some(*destination),
Packet::DmaRemoveTraceReply { destination, .. } => Some(*destination),
Packet::DmaPlaybackRequest { destination, .. } => Some(*destination),
Packet::DmaPlaybackReply { destination, .. } => Some(*destination),
Packet::SubkernelLoadRunRequest { destination, .. } => Some(*destination),
Packet::SubkernelLoadRunReply { destination, .. } => Some(*destination),
Packet::SubkernelMessage { destination, .. } => Some(*destination),
Packet::SubkernelMessageAck { destination, .. } => Some(*destination),
Packet::DmaPlaybackStatus { destination, .. } => Some(*destination),
Packet::SubkernelFinished { destination, .. } => Some(*destination),
_ => None
}
}
pub fn expects_response(&self) -> bool {
// returns true if the routable packet should elicit a response;
// reply and ACK packets end a conversation, so the firmware
// should not wait for a response to them
match self {
Packet::DmaAddTraceReply { .. } | Packet::DmaRemoveTraceReply { .. } |
Packet::DmaPlaybackReply { .. } | Packet::SubkernelLoadRunReply { .. } |
Packet::SubkernelMessageAck { .. } | Packet::DmaPlaybackStatus { .. } |
Packet::SubkernelFinished { .. } => false,
_ => true
}
}
}
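As a rough, hypothetical sketch (not part of this diff) of how a repeater could combine the two new helpers: forward anything addressed to another destination, and only wait for a reply when the packet itself is not a reply or ACK.
fn should_await_reply(packet: &Packet, self_destination: u8) -> bool {
    match packet.routable_destination() {
        Some(destination) if destination != self_destination => packet.expects_response(),
        _ => false,
    }
}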

View File

@ -103,12 +103,12 @@ pub enum Message<'a> {
SpiReadReply { succeeded: bool, data: u32 },
SpiBasicReply { succeeded: bool },
SubkernelLoadRunRequest { id: u32, run: bool },
SubkernelLoadRunRequest { id: u32, destination: u8, run: bool },
SubkernelLoadRunReply { succeeded: bool },
SubkernelAwaitFinishRequest { id: u32, timeout: u64 },
SubkernelAwaitFinishRequest { id: u32, timeout: i64 },
SubkernelAwaitFinishReply { status: SubkernelStatus },
SubkernelMsgSend { id: u32, count: u8, tag: &'a [u8], data: *const *const () },
SubkernelMsgRecvRequest { id: u32, timeout: u64, tags: &'a [u8] },
SubkernelMsgSend { id: u32, destination: Option<u8>, count: u8, tag: &'a [u8], data: *const *const () },
SubkernelMsgRecvRequest { id: i32, timeout: i64, tags: &'a [u8] },
SubkernelMsgRecvReply { status: SubkernelStatus, count: u8 },
Log(fmt::Arguments<'a>),

View File

@ -103,7 +103,7 @@ pub mod subkernel {
pub enum FinishStatus {
Ok,
CommLost,
Exception
Exception(u8) // exception source
}
#[derive(Debug, PartialEq, Clone, Copy)]
@ -216,7 +216,7 @@ pub mod subkernel {
Ok(())
}
pub fn subkernel_finished(io: &Io, subkernel_mutex: &Mutex, id: u32, with_exception: bool) {
pub fn subkernel_finished(io: &Io, subkernel_mutex: &Mutex, id: u32, with_exception: bool, exception_src: u8) {
// called upon receiving the DRTIO SubkernelFinished packet
let _lock = subkernel_mutex.lock(io).unwrap();
let subkernel = unsafe { SUBKERNELS.get_mut(&id) };
@ -226,7 +226,7 @@ pub mod subkernel {
if subkernel.state == SubkernelState::Running {
subkernel.state = SubkernelState::Finished {
status: match with_exception {
true => FinishStatus::Exception,
true => FinishStatus::Exception(exception_src),
false => FinishStatus::Ok,
}
}
@ -266,9 +266,9 @@ pub mod subkernel {
Ok(SubkernelFinished {
id: id,
comm_lost: status == FinishStatus::CommLost,
exception: if status == FinishStatus::Exception {
exception: if let FinishStatus::Exception(dest) = status {
Some(drtio::subkernel_retrieve_exception(io, aux_mutex,
routing_table, subkernel.destination)?)
routing_table, dest)?)
} else { None }
})
},
@ -279,7 +279,7 @@ pub mod subkernel {
}
pub fn await_finish(io: &Io, aux_mutex: &Mutex, subkernel_mutex: &Mutex,
routing_table: &RoutingTable, id: u32, timeout: u64) -> Result<SubkernelFinished, Error> {
routing_table: &RoutingTable, id: u32, timeout: i64) -> Result<SubkernelFinished, Error> {
{
let _lock = subkernel_mutex.lock(io)?;
match unsafe { SUBKERNELS.get(&id).unwrap().state } {
@ -291,7 +291,7 @@ pub mod subkernel {
}
let max_time = clock::get_ms() + timeout as u64;
let _res = io.until(|| {
if clock::get_ms() > max_time {
if timeout > 0 && clock::get_ms() > max_time {
return true;
}
if subkernel_mutex.test_lock() {
@ -305,7 +305,7 @@ pub mod subkernel {
_ => false
}
})?;
if clock::get_ms() > max_time {
if timeout > 0 && clock::get_ms() > max_time {
error!("Remote subkernel finish await timed out");
return Err(Error::Timeout);
}
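The timeout is now an i64, and the `timeout > 0 &&` guards above make a non-positive value mean "wait indefinitely"; a minimal sketch of that convention, with a hypothetical helper name:
// max_time_ms is computed as clock::get_ms() + timeout_ms as u64, as above.
fn deadline_expired(timeout_ms: i64, max_time_ms: u64, now_ms: u64) -> bool {
    timeout_ms > 0 && now_ms > max_time_ms
}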
@ -332,8 +332,9 @@ pub mod subkernel {
Err(_) => return,
};
let subkernel = unsafe { SUBKERNELS.get(&id) };
if subkernel.is_none() || subkernel.unwrap().state != SubkernelState::Running {
// do not add messages for non-existing, non-running or deleted subkernels
if subkernel.is_some() && subkernel.unwrap().state != SubkernelState::Running {
warn!("received a message for a non-running subkernel #{}", id);
// do not add messages for non-running or deleted subkernels
return
}
if status.is_first() {
@ -359,19 +360,26 @@ pub mod subkernel {
}
}
pub fn message_await(io: &Io, subkernel_mutex: &Mutex, id: u32, timeout: u64
pub fn message_await(io: &Io, subkernel_mutex: &Mutex, id: u32, timeout: i64
) -> Result<Message, Error> {
{
let is_subkernel = {
let _lock = subkernel_mutex.lock(io)?;
let is_subkernel = unsafe { SUBKERNELS.get(&id).is_some() };
if is_subkernel {
match unsafe { SUBKERNELS.get(&id).unwrap().state } {
SubkernelState::Finished { .. } => return Err(Error::SubkernelFinished),
SubkernelState::Finished { status: FinishStatus::Ok } |
SubkernelState::Running => (),
SubkernelState::Finished {
status: FinishStatus::CommLost,
} => return Err(Error::SubkernelFinished),
_ => return Err(Error::IncorrectState)
}
}
is_subkernel
};
let max_time = clock::get_ms() + timeout as u64;
let message = io.until_ok(|| {
if clock::get_ms() > max_time {
if timeout > 0 && clock::get_ms() > max_time {
return Ok(None);
}
if subkernel_mutex.test_lock() {
@ -384,10 +392,13 @@ pub mod subkernel {
return Ok(Some(unsafe { MESSAGE_QUEUE.remove(i) }));
}
}
if is_subkernel {
match unsafe { SUBKERNELS.get(&id).unwrap().state } {
SubkernelState::Finished { .. } => return Ok(None),
SubkernelState::Finished { status: FinishStatus::CommLost } |
SubkernelState::Finished { status: FinishStatus::Exception(_) } => return Ok(None),
_ => ()
}
}
Err(())
});
match message {
@ -408,15 +419,17 @@ pub mod subkernel {
}
pub fn message_send<'a>(io: &Io, aux_mutex: &Mutex, subkernel_mutex: &Mutex,
routing_table: &RoutingTable, id: u32, count: u8, tag: &'a [u8], message: *const *const ()
routing_table: &RoutingTable, id: u32, destination: Option<u8>, count: u8, tag: &'a [u8], message: *const *const ()
) -> Result<(), Error> {
let mut writer = Cursor::new(Vec::new());
let _lock = subkernel_mutex.lock(io)?;
let destination = unsafe { SUBKERNELS.get(&id).unwrap().destination };
// reuse rpc code for sending arbitrary data
rpc::send_args(&mut writer, 0, tag, message, false)?;
// skip service tag, but overwrite first byte with tag count
let destination = destination.unwrap_or_else(|| {
let _lock = subkernel_mutex.lock(io).unwrap();
unsafe { SUBKERNELS.get(&id).unwrap().destination }
}
);
let data = &mut writer.into_inner()[3..];
data[0] = count;
Ok(drtio::subkernel_send_message(

View File

@ -167,10 +167,10 @@ pub mod remote_dma {
}
pub fn playback_done(io: &Io, ddma_mutex: &Mutex,
id: u32, destination: u8, error: u8, channel: u32, timestamp: u64) {
id: u32, source: u8, error: u8, channel: u32, timestamp: u64) {
// called upon receiving PlaybackDone aux packet
let _lock = ddma_mutex.lock(io).unwrap();
let mut trace = unsafe { TRACES.get_mut(&id).unwrap().get_mut(&destination).unwrap() };
let mut trace = unsafe { TRACES.get_mut(&id).unwrap().get_mut(&source).unwrap() };
trace.state = RemoteState::PlaybackEnded {
error: error,
channel: channel,

View File

@ -78,6 +78,16 @@ pub mod drtio {
}
}
fn link_has_async_ready(linkno: u8) -> bool {
let linkno = linkno as usize;
let async_ready;
unsafe {
async_ready = (csr::DRTIO[linkno].async_messages_ready_read)() == 1;
(csr::DRTIO[linkno].async_messages_ready_write)(1);
}
async_ready
}
fn recv_aux_timeout(io: &Io, linkno: u8, timeout: u32) -> Result<drtioaux::Packet, Error> {
let max_time = clock::get_ms() + timeout as u64;
loop {
@ -96,27 +106,62 @@ pub mod drtio {
}
}
fn process_async_packets(io: &Io, ddma_mutex: &Mutex, subkernel_mutex: &Mutex, linkno: u8,
packet: drtioaux::Packet) -> Option<drtioaux::Packet> {
// returns None if an async packet has been consumed
fn process_async_packets(io: &Io, aux_mutex: &Mutex, ddma_mutex: &Mutex, subkernel_mutex: &Mutex,
routing_table: &drtio_routing::RoutingTable, linkno: u8)
{
if link_has_async_ready(linkno) {
loop {
let reply = aux_transact(io, aux_mutex, linkno, &drtioaux::Packet::RoutingRetrievePackets);
if let Ok(packet) = reply {
match packet {
drtioaux::Packet::DmaPlaybackStatus { id, destination, error, channel, timestamp } => {
remote_dma::playback_done(io, ddma_mutex, id, destination, error, channel, timestamp);
None
// packets to be consumed locally
drtioaux::Packet::DmaPlaybackStatus { id, source, destination: 0, error, channel, timestamp } => {
remote_dma::playback_done(io, ddma_mutex, id, source, error, channel, timestamp);
},
drtioaux::Packet::SubkernelFinished { id, with_exception } => {
subkernel::subkernel_finished(io, subkernel_mutex, id, with_exception);
None
drtioaux::Packet::SubkernelFinished { id, destination: 0, with_exception, exception_src } => {
subkernel::subkernel_finished(io, subkernel_mutex, id, with_exception, exception_src);
},
drtioaux::Packet::SubkernelMessage { id, destination: from, status, length, data } => {
drtioaux::Packet::SubkernelMessage { id, source: from, destination: 0, status, length, data } => {
subkernel::message_handle_incoming(io, subkernel_mutex, id, status, length as usize, &data);
// acknowledge receiving part of the message
drtioaux::send(linkno,
&drtioaux::Packet::SubkernelMessageAck { destination: from }
).unwrap();
None
// give the satellite some time to process the message
io.sleep(10).unwrap();
},
// routable packets
drtioaux::Packet::DmaAddTraceRequest { destination, .. } |
drtioaux::Packet::DmaAddTraceReply { destination, .. } |
drtioaux::Packet::DmaRemoveTraceRequest { destination, .. } |
drtioaux::Packet::DmaRemoveTraceReply { destination, .. } |
drtioaux::Packet::DmaPlaybackRequest { destination, .. } |
drtioaux::Packet::DmaPlaybackReply { destination, .. } |
drtioaux::Packet::SubkernelLoadRunRequest { destination, .. } |
drtioaux::Packet::SubkernelLoadRunReply { destination, .. } |
drtioaux::Packet::SubkernelMessage { destination, .. } |
drtioaux::Packet::SubkernelMessageAck { destination, .. } |
drtioaux::Packet::DmaPlaybackStatus { destination, .. } |
drtioaux::Packet::SubkernelFinished { destination, .. } => {
let dest_link = routing_table.0[destination as usize][0] - 1;
if dest_link == linkno {
warn!("[LINK#{}] Re-routed packet would return to the same link, dropping: {:?}", linkno, packet);
} else if destination == 0 {
warn!("[LINK#{}] Received invalid routable packet: {:?}", linkno, packet)
} else {
drtioaux::send(dest_link, &packet).unwrap();
}
}
drtioaux::Packet::RoutingNoPackets => break,
other => warn!("[LINK#{}] Received an unroutable packet: {:?}", linkno, other)
}
} else {
warn!("[LINK#{}] Error handling async packets ({})", linkno, reply.unwrap_err());
return;
}
}
other => Some(other)
}
}
@ -223,14 +268,10 @@ pub mod drtio {
}
}
fn process_unsolicited_aux(io: &Io, aux_mutex: &Mutex, ddma_mutex: &Mutex, subkernel_mutex: &Mutex, linkno: u8) {
fn process_unsolicited_aux(io: &Io, aux_mutex: &Mutex, linkno: u8) {
let _lock = aux_mutex.lock(io).unwrap();
match drtioaux::recv(linkno) {
Ok(Some(packet)) => {
if let Some(packet) = process_async_packets(io, ddma_mutex, subkernel_mutex, linkno, packet) {
warn!("[LINK#{}] unsolicited aux packet: {:?}", linkno, packet);
}
}
Ok(Some(packet)) => warn!("[LINK#{}] unsolicited aux packet: {:?}", linkno, packet),
Ok(None) => (),
Err(_) => warn!("[LINK#{}] aux packet error", linkno)
}
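The `routing_table.0[destination as usize][0] - 1` lookups used here and below follow the first-hop convention; a hypothetical helper spelling it out (treating a zero entry as "local/no hop" is an assumption, not stated in this diff):
fn first_hop_link(routing_table: &drtio_routing::RoutingTable, destination: u8) -> Option<u8> {
    match routing_table.0[destination as usize][0] {
        0 => None,            // assumed: destination is local or has no recorded hop
        hop => Some(hop - 1), // aux link index, as in `let linkno = hop - 1` below
    }
}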
@ -293,45 +334,36 @@ pub mod drtio {
let linkno = hop - 1;
if destination_up(up_destinations, destination) {
if up_links[linkno as usize] {
loop {
let reply = aux_transact(io, aux_mutex, linkno,
&drtioaux::Packet::DestinationStatusRequest {
destination: destination
});
if let Ok(reply) = reply {
let reply = process_async_packets(io, ddma_mutex, subkernel_mutex, linkno, reply);
match reply {
Some(drtioaux::Packet::DestinationDownReply) => {
drtioaux::Packet::DestinationDownReply => {
destination_set_up(routing_table, up_destinations, destination, false);
remote_dma::destination_changed(io, aux_mutex, ddma_mutex, routing_table, destination, false);
subkernel::destination_changed(io, aux_mutex, subkernel_mutex, routing_table, destination, false);
}
Some(drtioaux::Packet::DestinationOkReply) => (),
Some(drtioaux::Packet::DestinationSequenceErrorReply { channel }) => {
drtioaux::Packet::DestinationOkReply => (),
drtioaux::Packet::DestinationSequenceErrorReply { channel } => {
error!("[DEST#{}] RTIO sequence error involving channel 0x{:04x}:{}", destination, channel, resolve_channel_name(channel as u32));
unsafe { SEEN_ASYNC_ERRORS |= ASYNC_ERROR_SEQUENCE_ERROR };
}
Some(drtioaux::Packet::DestinationCollisionReply { channel }) => {
drtioaux::Packet::DestinationCollisionReply { channel } => {
error!("[DEST#{}] RTIO collision involving channel 0x{:04x}:{}", destination, channel, resolve_channel_name(channel as u32));
unsafe { SEEN_ASYNC_ERRORS |= ASYNC_ERROR_COLLISION };
}
Some(drtioaux::Packet::DestinationBusyReply { channel }) => {
drtioaux::Packet::DestinationBusyReply { channel } => {
error!("[DEST#{}] RTIO busy error involving channel 0x{:04x}:{}", destination, channel, resolve_channel_name(channel as u32));
unsafe { SEEN_ASYNC_ERRORS |= ASYNC_ERROR_BUSY };
}
Some(packet) => error!("[DEST#{}] received unexpected aux packet: {:?}", destination, packet),
None => {
// continue asking until we get Destination...Reply or error out
// wait a bit not to overwhelm the receiver causing gateway errors
io.sleep(10).unwrap();
continue;
}
packet => error!("[DEST#{}] received unexpected aux packet: {:?}", destination, packet),
}
} else {
error!("[DEST#{}] communication failed ({:?})", destination, reply.unwrap_err());
}
break;
}
} else {
destination_set_up(routing_table, up_destinations, destination, false);
remote_dma::destination_changed(io, aux_mutex, ddma_mutex, routing_table, destination, false);
@ -371,7 +403,8 @@ pub mod drtio {
if up_links[linkno as usize] {
/* link was previously up */
if link_rx_up(linkno) {
process_unsolicited_aux(&io, aux_mutex, ddma_mutex, subkernel_mutex, linkno);
process_async_packets(&io, aux_mutex, ddma_mutex, subkernel_mutex, routing_table, linkno);
process_unsolicited_aux(&io, aux_mutex, linkno);
process_local_errors(linkno);
} else {
info!("[LINK#{}] link is down", linkno);
@ -456,10 +489,10 @@ pub mod drtio {
partition_data(trace, |slice, status, len: usize| {
let reply = aux_transact(io, aux_mutex, linkno,
&drtioaux::Packet::DmaAddTraceRequest {
id: id, destination: destination, status: status, length: len as u16, trace: *slice})?;
id: id, source: 0, destination: destination, status: status, length: len as u16, trace: *slice})?;
match reply {
drtioaux::Packet::DmaAddTraceReply { succeeded: true } => Ok(()),
drtioaux::Packet::DmaAddTraceReply { succeeded: false } => Err(Error::DmaAddTraceFail(destination)),
drtioaux::Packet::DmaAddTraceReply { destination: 0, succeeded: true, .. } => Ok(()),
drtioaux::Packet::DmaAddTraceReply { destination: 0, succeeded: false, .. } => Err(Error::DmaAddTraceFail(destination)),
packet => Err(Error::UnexpectedPacket(packet)),
}
})
@ -469,10 +502,10 @@ pub mod drtio {
id: u32, destination: u8) -> Result<(), Error> {
let linkno = routing_table.0[destination as usize][0] - 1;
let reply = aux_transact(io, aux_mutex, linkno,
&drtioaux::Packet::DmaRemoveTraceRequest { id: id, destination: destination })?;
&drtioaux::Packet::DmaRemoveTraceRequest { id: id, source: 0, destination: destination })?;
match reply {
drtioaux::Packet::DmaRemoveTraceReply { succeeded: true } => Ok(()),
drtioaux::Packet::DmaRemoveTraceReply { succeeded: false } => Err(Error::DmaEraseFail(destination)),
drtioaux::Packet::DmaRemoveTraceReply { destination: 0, succeeded: true } => Ok(()),
drtioaux::Packet::DmaRemoveTraceReply { destination: 0, succeeded: false } => Err(Error::DmaEraseFail(destination)),
packet => Err(Error::UnexpectedPacket(packet)),
}
}
@ -481,10 +514,10 @@ pub mod drtio {
id: u32, destination: u8, timestamp: u64) -> Result<(), Error> {
let linkno = routing_table.0[destination as usize][0] - 1;
let reply = aux_transact(io, aux_mutex, linkno,
&drtioaux::Packet::DmaPlaybackRequest{ id: id, destination: destination, timestamp: timestamp })?;
&drtioaux::Packet::DmaPlaybackRequest{ id: id, source: 0, destination: destination, timestamp: timestamp })?;
match reply {
drtioaux::Packet::DmaPlaybackReply { succeeded: true } => Ok(()),
drtioaux::Packet::DmaPlaybackReply { succeeded: false } =>
drtioaux::Packet::DmaPlaybackReply { destination: 0, succeeded: true } => Ok(()),
drtioaux::Packet::DmaPlaybackReply { destination: 0, succeeded: false } =>
Err(Error::DmaPlaybackFail(destination)),
packet => Err(Error::UnexpectedPacket(packet)),
}
@ -559,10 +592,10 @@ pub mod drtio {
id: u32, destination: u8, run: bool) -> Result<(), Error> {
let linkno = routing_table.0[destination as usize][0] - 1;
let reply = aux_transact(io, aux_mutex, linkno,
&drtioaux::Packet::SubkernelLoadRunRequest{ id: id, destination: destination, run: run })?;
&drtioaux::Packet::SubkernelLoadRunRequest{ id: id, source: 0, destination: destination, run: run })?;
match reply {
drtioaux::Packet::SubkernelLoadRunReply { succeeded: true } => Ok(()),
drtioaux::Packet::SubkernelLoadRunReply { succeeded: false } =>
drtioaux::Packet::SubkernelLoadRunReply { destination: 0, succeeded: true } => Ok(()),
drtioaux::Packet::SubkernelLoadRunReply { destination: 0, succeeded: false } =>
Err(Error::SubkernelRunFail(destination)),
packet => Err(Error::UnexpectedPacket(packet)),
}
@ -595,7 +628,8 @@ pub mod drtio {
partition_data(message, |slice, status, len: usize| {
let reply = aux_transact(io, aux_mutex, linkno,
&drtioaux::Packet::SubkernelMessage {
destination: destination, id: id, status: status, length: len as u16, data: *slice})?;
source: 0, destination: destination,
id: id, status: status, length: len as u16, data: *slice})?;
match reply {
drtioaux::Packet::SubkernelMessageAck { .. } => Ok(()),
packet => Err(Error::UnexpectedPacket(packet)),

View File

@ -471,6 +471,7 @@ fn process_host_message(io: &Io, _aux_mutex: &Mutex, _ddma_mutex: &Mutex, _subke
match subkernel::upload(io, _aux_mutex, _subkernel_mutex, _routing_table, _id) {
Ok(_) => host_write(stream, host::Reply::LoadCompleted)?,
Err(error) => {
subkernel::clear_subkernels(io, _subkernel_mutex)?;
let mut description = String::new();
write!(&mut description, "{}", error).unwrap();
host_write(stream, host::Reply::LoadFailed(&description))?
@ -631,6 +632,8 @@ fn process_kern_message(io: &Io, aux_mutex: &Mutex,
unsafe { kernel::stop() }
session.kernel_state = KernelState::Absent;
unsafe { session.congress.cache.unborrow() }
#[cfg(has_drtio)]
subkernel::clear_subkernels(io, _subkernel_mutex)?;
match stream {
None => return Ok(true),
@ -648,6 +651,8 @@ fn process_kern_message(io: &Io, aux_mutex: &Mutex,
unsafe { kernel::stop() }
session.kernel_state = KernelState::Absent;
unsafe { session.congress.cache.unborrow() }
#[cfg(has_drtio)]
subkernel::clear_subkernels(io, _subkernel_mutex)?;
match stream {
None => {
@ -668,7 +673,7 @@ fn process_kern_message(io: &Io, aux_mutex: &Mutex,
}
}
#[cfg(has_drtio)]
&kern::SubkernelLoadRunRequest { id, run } => {
&kern::SubkernelLoadRunRequest { id, destination: _, run } => {
let succeeded = match subkernel::load(
io, aux_mutex, _subkernel_mutex, routing_table, id, run) {
Ok(()) => true,
@ -699,20 +704,20 @@ fn process_kern_message(io: &Io, aux_mutex: &Mutex,
kern_send(io, &kern::SubkernelAwaitFinishReply { status: status })
}
#[cfg(has_drtio)]
&kern::SubkernelMsgSend { id, count, tag, data } => {
subkernel::message_send(io, aux_mutex, _subkernel_mutex, routing_table, id, count, tag, data)?;
&kern::SubkernelMsgSend { id, destination, count, tag, data } => {
subkernel::message_send(io, aux_mutex, _subkernel_mutex, routing_table, id, destination, count, tag, data)?;
kern_acknowledge()
}
#[cfg(has_drtio)]
&kern::SubkernelMsgRecvRequest { id, timeout, tags } => {
let message_received = subkernel::message_await(io, _subkernel_mutex, id, timeout);
let message_received = subkernel::message_await(io, _subkernel_mutex, id as u32, timeout);
let (status, count) = match message_received {
Ok(ref message) => (kern::SubkernelStatus::NoError, message.count),
Err(SubkernelError::Timeout) => (kern::SubkernelStatus::Timeout, 0),
Err(SubkernelError::IncorrectState) => (kern::SubkernelStatus::IncorrectState, 0),
Err(SubkernelError::SubkernelFinished) => {
let res = subkernel::retrieve_finish_status(io, aux_mutex, _subkernel_mutex,
routing_table, id)?;
routing_table, id as u32)?;
if res.comm_lost {
(kern::SubkernelStatus::CommLost, 0)
} else if let Some(exception) = &res.exception {
@ -971,7 +976,13 @@ pub fn thread(io: Io, aux_mutex: &Mutex,
drtio::clear_buffers(&io, &aux_mutex);
}
}
stream.close().expect("session: close socket");
loop {
match stream.close() {
Ok(_) => break,
Err(SchedError::Interrupted) => (),
Err(e) => panic!("session: close socket: {:?}", e)
};
}
});
}

View File

@ -1,41 +1,190 @@
use alloc::{vec::Vec, collections::btree_map::BTreeMap, string::String};
use core::mem;
use board_artiq::{drtioaux, drtio_routing::RoutingTable};
use board_misoc::{csr, cache::flush_l2_cache};
use proto_artiq::drtioaux_proto::PayloadStatus;
use alloc::{vec::Vec, collections::btree_map::BTreeMap};
use ::{cricon_select, RtioMaster};
use routing::{Router, Sliceable};
use kernel::Manager as KernelManager;
use ::{cricon_select, cricon_read, RtioMaster, MASTER_PAYLOAD_MAX_SIZE};
const ALIGNMENT: usize = 64;
#[derive(Debug, PartialEq)]
#[derive(PartialEq)]
enum ManagerState {
Idle,
Playback
}
pub struct RtioStatus {
pub source: u8,
pub id: u32,
pub error: u8,
pub channel: u32,
pub timestamp: u64
}
#[derive(Debug)]
pub enum Error {
IdNotFound,
PlaybackInProgress,
EntryNotComplete
EntryNotComplete,
MasterDmaFound,
UploadFail,
}
#[derive(Debug)]
struct Entry {
trace: Vec<u8>,
padding_len: usize,
complete: bool
complete: bool,
duration: u64, // relevant for locally ran DMA
}
impl Entry {
pub fn from_vec(data: Vec<u8>, duration: u64) -> Entry {
let mut entry = Entry {
trace: data,
padding_len: 0,
complete: true,
duration: duration,
};
entry.realign();
entry
}
pub fn id(&self) -> u32 {
self.trace[self.padding_len..].as_ptr() as u32
}
pub fn realign(&mut self) {
self.trace.push(0);
let data_len = self.trace.len();
self.trace.reserve(ALIGNMENT - 1);
let padding = ALIGNMENT - self.trace.as_ptr() as usize % ALIGNMENT;
let padding = if padding == ALIGNMENT { 0 } else { padding };
for _ in 0..padding {
// Vec guarantees that this will not reallocate
self.trace.push(0)
}
for i in 1..data_len + 1 {
self.trace[data_len + padding - i] = self.trace[data_len - i]
}
self.complete = true;
self.padding_len = padding;
}
}
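realign() shifts the trace so the payload starts on a 64-byte boundary; the two-step padding computation above is equivalent to this one-liner (a sketch, not part of the diff):
// Padding needed to round `addr` up to the next multiple of ALIGNMENT.
fn padding_for(addr: usize) -> usize {
    (ALIGNMENT - addr % ALIGNMENT) % ALIGNMENT
}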
enum RemoteTraceState {
Unsent,
Sending(usize),
Ready,
Running(usize),
}
struct RemoteTraces {
remote_traces: BTreeMap<u8, Sliceable>,
state: RemoteTraceState,
}
impl RemoteTraces {
pub fn new(traces: BTreeMap<u8, Sliceable>) -> RemoteTraces {
RemoteTraces {
remote_traces: traces,
state: RemoteTraceState::Unsent
}
}
// on subkernel request
pub fn upload_traces(&mut self, id: u32, router: &mut Router, rank: u8, self_destination: u8, routing_table: &RoutingTable) -> usize {
let len = self.remote_traces.len();
if len > 0 {
self.state = RemoteTraceState::Sending(self.remote_traces.len());
for (dest, trace) in self.remote_traces.iter_mut() {
// queue up the first packet for all destinations; the rest will be sent after the first ACK
let mut data_slice: [u8; MASTER_PAYLOAD_MAX_SIZE] = [0; MASTER_PAYLOAD_MAX_SIZE];
let meta = trace.get_slice_master(&mut data_slice);
router.route(drtioaux::Packet::DmaAddTraceRequest {
source: self_destination, destination: *dest, id: id,
status: meta.status, length: meta.len, trace: data_slice
}, routing_table, rank, self_destination);
}
}
len
}
// on incoming Packet::DmaAddTraceReply
pub fn ack_upload(&mut self, kernel_manager: &mut KernelManager, source: u8, id: u32, succeeded: bool, router: &mut Router, rank: u8, self_destination: u8, routing_table: &RoutingTable) {
if let RemoteTraceState::Sending(count) = self.state {
if let Some(trace) = self.remote_traces.get_mut(&source) {
if trace.at_end() {
if count - 1 == 0 {
self.state = RemoteTraceState::Ready;
kernel_manager.ddma_remote_uploaded(succeeded);
} else {
self.state = RemoteTraceState::Sending(count - 1);
}
} else {
// send next slice
let mut data_slice: [u8; MASTER_PAYLOAD_MAX_SIZE] = [0; MASTER_PAYLOAD_MAX_SIZE];
let meta = trace.get_slice_master(&mut data_slice);
router.route(drtioaux::Packet::DmaAddTraceRequest {
source: self_destination, destination: meta.destination, id: id,
status: meta.status, length: meta.len, trace: data_slice
}, routing_table, rank, self_destination);
}
}
}
}
// on subkernel request
pub fn playback(&mut self, id: u32, timestamp: u64, router: &mut Router, rank: u8, self_destination: u8, routing_table: &RoutingTable) {
// route all the playback requests
// remote traces + local trace
self.state = RemoteTraceState::Running(self.remote_traces.len() + 1);
for (dest, _) in self.remote_traces.iter() {
router.route(drtioaux::Packet::DmaPlaybackRequest {
source: self_destination, destination: *dest, id: id, timestamp: timestamp
}, routing_table, rank, self_destination);
// response will be ignored (succeeded = false handled by the main thread)
}
}
// on incoming Packet::DmaPlaybackStatus
pub fn remote_finished(&mut self, kernel_manager: &mut KernelManager, error: u8, channel: u32, timestamp: u64) {
if let RemoteTraceState::Running(count) = self.state {
if error != 0 || count - 1 == 0 {
// notify the kernel about a DDMA error or finish
kernel_manager.ddma_finished(error, channel, timestamp);
self.state = RemoteTraceState::Ready;
// further messages will be ignored (if there was an error)
} else { // no error and not the last one awaited
self.state = RemoteTraceState::Running(count - 1);
}
}
}
pub fn erase(&mut self, id: u32, router: &mut Router, rank: u8, self_destination: u8, routing_table: &RoutingTable) {
for (dest, _) in self.remote_traces.iter() {
router.route(drtioaux::Packet::DmaRemoveTraceRequest {
source: self_destination, destination: *dest, id: id
}, routing_table, rank, self_destination);
// the response is ignored, as this object is about to be dropped as well
}
}
}
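As far as this diff shows, the RemoteTraceState transitions driven by the methods above are (summary, not code from the repository):
// Unsent     --upload_traces (any remote traces)--> Sending(n)
// Sending(n) --ack_upload, last slice ACKed-->      Sending(n-1) ... Ready
// Ready      --playback-->                          Running(remote traces + 1)
// Running(n) --remote_finished (error or last)-->   Ready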
#[derive(Debug)]
pub struct Manager {
entries: BTreeMap<u32, Entry>,
entries: BTreeMap<(u8, u32), Entry>,
state: ManagerState,
currentid: u32
current_id: u32,
current_source: u8,
previous_cri_master: RtioMaster,
remote_entries: BTreeMap<u32, RemoteTraces>,
name_map: BTreeMap<String, u32>,
recording_trace: Vec<u8>,
recording_name: String
}
impl Manager {
@ -47,74 +196,197 @@ impl Manager {
}
Manager {
entries: BTreeMap::new(),
currentid: 0,
current_id: 0,
current_source: 0,
previous_cri_master: RtioMaster::Drtio,
state: ManagerState::Idle,
remote_entries: BTreeMap::new(),
name_map: BTreeMap::new(),
recording_trace: Vec::new(),
recording_name: String::new(),
}
}
pub fn add(&mut self, id: u32, status: PayloadStatus, trace: &[u8], trace_len: usize) -> Result<(), Error> {
pub fn add(&mut self, source: u8, id: u32, status: PayloadStatus, trace: &[u8], trace_len: usize) -> Result<(), Error> {
if status.is_first() {
self.entries.remove(&id);
self.entries.remove(&(source, id));
}
let entry = match self.entries.get_mut(&id) {
let entry = match self.entries.get_mut(&(source, id)) {
Some(entry) => {
if entry.complete {
// replace entry
self.entries.remove(&id);
self.entries.insert(id, Entry {
self.entries.remove(&(source, id));
self.entries.insert((source, id), Entry {
trace: Vec::new(),
padding_len: 0,
complete: false });
self.entries.get_mut(&id).unwrap()
complete: false,
duration: 0
});
self.entries.get_mut(&(source, id)).unwrap()
} else {
entry
}
},
None => {
self.entries.insert(id, Entry {
self.entries.insert((source, id), Entry {
trace: Vec::new(),
padding_len: 0,
complete: false });
self.entries.get_mut(&id).unwrap()
complete: false,
duration: 0,
});
self.entries.get_mut(&(source, id)).unwrap()
},
};
entry.trace.extend(&trace[0..trace_len]);
if status.is_last() {
entry.trace.push(0);
let data_len = entry.trace.len();
// Realign.
entry.trace.reserve(ALIGNMENT - 1);
let padding = ALIGNMENT - entry.trace.as_ptr() as usize % ALIGNMENT;
let padding = if padding == ALIGNMENT { 0 } else { padding };
for _ in 0..padding {
// Vec guarantees that this will not reallocate
entry.trace.push(0)
}
for i in 1..data_len + 1 {
entry.trace[data_len + padding - i] = entry.trace[data_len - i]
}
entry.complete = true;
entry.padding_len = padding;
entry.realign();
flush_l2_cache();
}
Ok(())
}
// API for subkernel
pub fn record_start(&mut self, name: &str) {
self.recording_name = String::from(name);
self.recording_trace = Vec::new();
}
pub fn erase(&mut self, id: u32) -> Result<(), Error> {
match self.entries.remove(&id) {
// API for subkernel
pub fn record_append(&mut self, data: &[u8]) {
self.recording_trace.extend_from_slice(data);
}
// API for subkernel
pub fn record_stop(&mut self, duration: u64, self_destination: u8) -> Result<u32, Error> {
let mut trace = Vec::new();
mem::swap(&mut self.recording_trace, &mut trace);
trace.push(0);
let mut local_trace = Vec::new();
let mut remote_traces: BTreeMap<u8, Sliceable> = BTreeMap::new();
// analyze each entry and put it in the proper bucket; the kernel core
// sends whole chunks to limit comms/kernel CPU communication,
// and only the comms core has access to the various DMA buffers.
let mut ptr = 0;
while trace[ptr] != 0 {
// ptr + 3 = tgt >> 24 (destination)
let len = trace[ptr] as usize;
let destination = trace[ptr+3];
if destination == 0 {
return Err(Error::MasterDmaFound);
} else if destination == self_destination {
local_trace.extend(&trace[ptr..ptr+len]);
}
else {
if let Some(remote_trace) = remote_traces.get_mut(&destination) {
remote_trace.extend(&trace[ptr..ptr+len]);
} else {
remote_traces.insert(destination, Sliceable::new(destination, trace[ptr..ptr+len].to_vec()));
}
}
// and jump to the next event
ptr += len;
}
let local_entry = Entry::from_vec(local_trace, duration);
let id = local_entry.id();
self.entries.insert((self_destination, id), local_entry);
self.remote_entries.insert(id, RemoteTraces::new(remote_traces));
let mut name = String::new();
mem::swap(&mut self.recording_name, &mut name);
self.name_map.insert(name, id);
flush_l2_cache();
Ok(id)
}
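record_stop() walks the recorded trace with a framing inferred from the loop above: byte 0 of each event holds its length, byte 3 holds the destination (target >> 24), and a zero length byte terminates the trace. A hypothetical helper restating that parse:
fn destinations(trace: &[u8]) -> Vec<u8> {
    let mut dests = Vec::new();
    let mut ptr = 0;
    while trace[ptr] != 0 {
        let len = trace[ptr] as usize;
        dests.push(trace[ptr + 3]); // trace[ptr + 3] == target >> 24, the destination
        ptr += len;
    }
    dests
}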
pub fn upload_traces(&mut self, id: u32, router: &mut Router, rank: u8, self_destination: u8,
routing_table: &RoutingTable) -> Result<usize, Error> {
let remote_traces = self.remote_entries.get_mut(&id);
let mut len = 0;
if let Some(traces) = remote_traces {
len = traces.upload_traces(id, router, rank, self_destination, routing_table);
}
Ok(len)
}
pub fn with_trace<F, R>(&self, self_destination: u8, name: &str, f: F) -> R
where F: FnOnce(Option<&[u8]>, u64) -> R {
if let Some(ptr) = self.name_map.get(name) {
match self.entries.get(&(self_destination, *ptr)) {
Some(entry) => f(Some(&entry.trace[entry.padding_len..]), entry.duration),
None => f(None, 0)
}
} else {
f(None, 0)
}
}
// API for subkernel
pub fn playback_remote(&mut self, id: u32, timestamp: u64,
router: &mut Router, rank: u8, self_destination: u8, routing_table: &RoutingTable
) -> Result<(), Error> {
if let Some(traces) = self.remote_entries.get_mut(&id) {
traces.playback(id, timestamp, router, rank, self_destination, routing_table);
Ok(())
} else {
Err(Error::IdNotFound)
}
}
// API for subkernel
pub fn erase_name(&mut self, name: &str, router: &mut Router, rank: u8, self_destination: u8, routing_table: &RoutingTable) {
if let Some(id) = self.name_map.get(name) {
if let Some(traces) = self.remote_entries.get_mut(&id) {
traces.erase(*id, router, rank, self_destination, routing_table);
self.remote_entries.remove(&id);
}
self.entries.remove(&(self_destination, *id));
self.name_map.remove(name);
}
}
// API for incoming DDMA (drtio)
pub fn erase(&mut self, source: u8, id: u32) -> Result<(), Error> {
match self.entries.remove(&(source, id)) {
Some(_) => Ok(()),
None => Err(Error::IdNotFound)
}
}
pub fn playback(&mut self, id: u32, timestamp: u64) -> Result<(), Error> {
pub fn remote_finished(&mut self, kernel_manager: &mut KernelManager,
id: u32, error: u8, channel: u32, timestamp: u64) {
if let Some(entry) = self.remote_entries.get_mut(&id) {
entry.remote_finished(kernel_manager, error, channel, timestamp);
}
}
pub fn ack_upload(&mut self, kernel_manager: &mut KernelManager, source: u8, id: u32, succeeded: bool,
router: &mut Router, rank: u8, self_destination: u8, routing_table: &RoutingTable) {
if let Some(entry) = self.remote_entries.get_mut(&id) {
entry.ack_upload(kernel_manager, source, id, succeeded, router, rank, self_destination, routing_table);
}
}
pub fn cleanup(&mut self, router: &mut Router, rank: u8, self_destination: u8, routing_table: &RoutingTable) {
// after subkernel ends, remove all self-generated traces
for (_, id) in self.name_map.iter_mut() {
if let Some(traces) = self.remote_entries.get_mut(&id) {
traces.erase(*id, router, rank, self_destination, routing_table);
self.remote_entries.remove(&id);
}
self.entries.remove(&(self_destination, *id));
}
self.name_map.clear();
}
// API for both incoming DDMA (drtio) and subkernel
pub fn playback(&mut self, source: u8, id: u32, timestamp: u64) -> Result<(), Error> {
if self.state != ManagerState::Idle {
return Err(Error::PlaybackInProgress);
}
let entry = match self.entries.get(&id){
let entry = match self.entries.get(&(source, id)){
Some(entry) => entry,
None => { return Err(Error::IdNotFound); }
};
@ -125,7 +397,9 @@ impl Manager {
assert!(ptr as u32 % 64 == 0);
self.state = ManagerState::Playback;
self.currentid = id;
self.current_id = id;
self.current_source = source;
self.previous_cri_master = cricon_read();
unsafe {
csr::rtio_dma::base_address_write(ptr as u64);
@ -149,7 +423,7 @@ impl Manager {
} else {
self.state = ManagerState::Idle;
unsafe {
cricon_select(RtioMaster::Drtio);
cricon_select(self.previous_cri_master);
let error = csr::rtio_dma::error_read();
let channel = csr::rtio_dma::error_channel_read();
let timestamp = csr::rtio_dma::error_timestamp_read();
@ -157,7 +431,8 @@ impl Manager {
csr::rtio_dma::error_write(1);
}
return Some(RtioStatus {
id: self.currentid,
source: self.current_source,
id: self.current_id,
error: error,
channel: channel,
timestamp: timestamp });

View File

@ -1,8 +1,8 @@
use core::{mem, option::NoneError, cmp::min};
use alloc::{string::String, format, vec::Vec, collections::{btree_map::BTreeMap, vec_deque::VecDeque}};
use core::{mem, option::NoneError};
use alloc::{string::String, format, vec::Vec, collections::btree_map::BTreeMap};
use cslice::AsCSlice;
use board_artiq::{mailbox, spi};
use board_artiq::{drtioaux, drtio_routing::RoutingTable, mailbox, spi};
use board_misoc::{csr, clock, i2c};
use proto_artiq::{
drtioaux_proto::PayloadStatus,
@ -15,6 +15,8 @@ use kernel::eh_artiq::StackPointerBacktrace;
use ::{cricon_select, RtioMaster};
use cache::Cache;
use dma::{Manager as DmaManager, Error as DmaError};
use routing::{Router, Sliceable, SliceMeta};
use SAT_PAYLOAD_MAX_SIZE;
use MASTER_PAYLOAD_MAX_SIZE;
@ -61,8 +63,12 @@ enum KernelState {
Absent,
Loaded,
Running,
MsgAwait { max_time: u64, tags: Vec<u8> },
MsgSending
MsgAwait { id: u32, max_time: i64, tags: Vec<u8> },
MsgSending,
SubkernelAwaitLoad,
SubkernelAwaitFinish { max_time: i64, id: u32 },
DmaUploading { max_time: u64 },
DmaAwait { max_time: u64 },
}
#[derive(Debug)]
@ -74,7 +80,9 @@ pub enum Error {
NoMessage,
AwaitingMessage,
SubkernelIoError,
KernelException(Sliceable)
DrtioError,
KernelException(Sliceable),
DmaError(DmaError),
}
impl From<NoneError> for Error {
@ -89,19 +97,25 @@ impl From<io::Error<!>> for Error {
}
}
impl From<drtioaux::Error<!>> for Error {
fn from(_value: drtioaux::Error<!>) -> Error {
Error::DrtioError
}
}
impl From<DmaError> for Error {
fn from(value: DmaError) -> Error {
Error::DmaError(value)
}
}
macro_rules! unexpected {
($($arg:tt)*) => (return Err(Error::Unexpected(format!($($arg)*))));
}
/* represents data that has to be sent to Master */
#[derive(Debug)]
pub struct Sliceable {
it: usize,
data: Vec<u8>
}
/* represents interkernel messages */
struct Message {
id: u32,
count: u8,
data: Vec<u8>
}
@ -109,7 +123,6 @@ struct Message {
#[derive(PartialEq)]
enum OutMessageState {
NoMessage,
MessageReady,
MessageBeingSent,
MessageSent,
MessageAcknowledged
@ -119,7 +132,7 @@ enum OutMessageState {
struct MessageManager {
out_message: Option<Sliceable>,
out_state: OutMessageState,
in_queue: VecDeque<Message>,
in_queue: Vec<Message>,
in_buffer: Option<Message>,
}
@ -128,7 +141,9 @@ struct Session {
kernel_state: KernelState,
log_buffer: String,
last_exception: Option<Sliceable>,
messages: MessageManager
source: u8, // which destination requested running the kernel
messages: MessageManager,
subkernels_finished: Vec<u32> // ids of subkernels finished
}
#[derive(Debug)]
@ -147,42 +162,9 @@ pub struct Manager {
pub struct SubkernelFinished {
pub id: u32,
pub with_exception: bool
}
pub struct SliceMeta {
pub len: u16,
pub status: PayloadStatus
}
macro_rules! get_slice_fn {
( $name:tt, $size:expr ) => {
pub fn $name(&mut self, data_slice: &mut [u8; $size]) -> SliceMeta {
let first = self.it == 0;
let len = min($size, self.data.len() - self.it);
let last = self.it + len == self.data.len();
let status = PayloadStatus::from_status(first, last);
data_slice[..len].clone_from_slice(&self.data[self.it..self.it+len]);
self.it += len;
SliceMeta {
len: len as u16,
status: status
}
}
};
}
impl Sliceable {
pub fn new(data: Vec<u8>) -> Sliceable {
Sliceable {
it: 0,
data: data
}
}
get_slice_fn!(get_slice_sat, SAT_PAYLOAD_MAX_SIZE);
get_slice_fn!(get_slice_master, MASTER_PAYLOAD_MAX_SIZE);
pub with_exception: bool,
pub exception_source: u8,
pub source: u8
}
impl MessageManager {
@ -190,12 +172,12 @@ impl MessageManager {
MessageManager {
out_message: None,
out_state: OutMessageState::NoMessage,
in_queue: VecDeque::new(),
in_queue: Vec::new(),
in_buffer: None
}
}
pub fn handle_incoming(&mut self, status: PayloadStatus, length: usize, data: &[u8; MASTER_PAYLOAD_MAX_SIZE]) {
pub fn handle_incoming(&mut self, status: PayloadStatus, length: usize, id: u32, data: &[u8; MASTER_PAYLOAD_MAX_SIZE]) {
// called when receiving a message from master
if status.is_first() {
// clear the buffer for first message
@ -205,6 +187,7 @@ impl MessageManager {
Some(message) => message.data.extend(&data[..length]),
None => {
self.in_buffer = Some(Message {
id: id,
count: data[0],
data: data[1..length].to_vec()
});
@ -212,18 +195,7 @@ impl MessageManager {
};
if status.is_last() {
// when done, remove from working queue
self.in_queue.push_back(self.in_buffer.take().unwrap());
}
}
pub fn is_outgoing_ready(&mut self) -> bool {
// called by main loop, to see if there's anything to send, will send it afterwards
match self.out_state {
OutMessageState::MessageReady => {
self.out_state = OutMessageState::MessageBeingSent;
true
},
_ => false
self.in_queue.push(self.in_buffer.take().unwrap());
}
}
@ -266,19 +238,42 @@ impl MessageManager {
}
}
pub fn accept_outgoing(&mut self, count: u8, tag: &[u8], data: *const *const ()) -> Result<(), Error> {
pub fn accept_outgoing(&mut self, id: u32, self_destination: u8, destination: u8,
count: u8, tag: &[u8], data: *const *const (),
routing_table: &RoutingTable, rank: u8, router: &mut Router
) -> Result<(), Error> {
let mut writer = Cursor::new(Vec::new());
rpc::send_args(&mut writer, 0, tag, data, false)?;
// skip service tag, but write the count
let mut data = writer.into_inner().split_off(3);
data[0] = count;
self.out_message = Some(Sliceable::new(data));
self.out_state = OutMessageState::MessageReady;
self.out_message = Some(Sliceable::new(destination, data));
let mut data_slice: [u8; MASTER_PAYLOAD_MAX_SIZE] = [0; MASTER_PAYLOAD_MAX_SIZE];
self.out_state = OutMessageState::MessageBeingSent;
let meta = self.get_outgoing_slice(&mut data_slice).unwrap();
router.route(drtioaux::Packet::SubkernelMessage {
source: self_destination, destination: destination, id: id,
status: meta.status, length: meta.len as u16, data: data_slice
}, routing_table, rank, self_destination);
Ok(())
}
pub fn get_incoming(&mut self) -> Option<Message> {
self.in_queue.pop_front()
pub fn get_incoming(&mut self, id: u32) -> Option<Message> {
for i in 0..self.in_queue.len() {
if self.in_queue[i].id == id {
return Some(self.in_queue.remove(i));
}
}
None
}
pub fn pending_ids(&self) -> Vec<u32> {
let mut pending_ids: Vec<u32> = Vec::new();
for msg in self.in_queue.iter() {
pending_ids.push(msg.id);
}
pending_ids
}
}
@ -288,15 +283,16 @@ impl Session {
kernel_state: KernelState::Absent,
log_buffer: String::new(),
last_exception: None,
messages: MessageManager::new()
source: 0,
messages: MessageManager::new(),
subkernels_finished: Vec::new()
}
}
fn running(&self) -> bool {
match self.kernel_state {
KernelState::Absent | KernelState::Loaded => false,
KernelState::Running | KernelState::MsgAwait { .. } |
KernelState::MsgSending => true
_ => true
}
}
@ -369,23 +365,24 @@ impl Manager {
unsafe { self.cache.unborrow() }
}
pub fn run(&mut self, id: u32) -> Result<(), Error> {
pub fn run(&mut self, source: u8, id: u32) -> Result<(), Error> {
info!("starting subkernel #{}", id);
if self.session.kernel_state != KernelState::Loaded
|| self.current_id != id {
self.load(id)?;
}
self.session.source = source;
self.session.kernel_state = KernelState::Running;
cricon_select(RtioMaster::Kernel);
kern_acknowledge()
}
pub fn message_handle_incoming(&mut self, status: PayloadStatus, length: usize, slice: &[u8; MASTER_PAYLOAD_MAX_SIZE]) {
pub fn message_handle_incoming(&mut self, status: PayloadStatus, length: usize, id: u32, slice: &[u8; MASTER_PAYLOAD_MAX_SIZE]) {
if !self.is_running() {
return;
}
self.session.messages.handle_incoming(status, length, slice);
self.session.messages.handle_incoming(status, length, id, slice);
}
pub fn message_get_slice(&mut self, slice: &mut [u8; MASTER_PAYLOAD_MAX_SIZE]) -> Option<SliceMeta> {
@ -403,14 +400,6 @@ impl Manager {
self.session.messages.ack_slice()
}
pub fn message_is_ready(&mut self) -> bool {
self.session.messages.is_outgoing_ready()
}
pub fn get_last_finished(&mut self) -> Option<SubkernelFinished> {
self.last_finished.take()
}
pub fn load(&mut self, id: u32) -> Result<(), Error> {
if self.current_id == id && self.session.kernel_state == KernelState::Loaded {
return Ok(())
@ -434,6 +423,7 @@ impl Manager {
}
kern::LoadReply(Err(error)) => {
kernel_cpu::stop();
error!("load error: {:?}", error);
Err(Error::Load(format!("{}", error)))
}
other => {
@ -447,7 +437,7 @@ impl Manager {
pub fn exception_get_slice(&mut self, data_slice: &mut [u8; SAT_PAYLOAD_MAX_SIZE]) -> SliceMeta {
match self.session.last_exception.as_mut() {
Some(exception) => exception.get_slice_sat(data_slice),
None => SliceMeta { len: 0, status: PayloadStatus::FirstAndLast }
None => SliceMeta { destination: 0, len: 0, status: PayloadStatus::FirstAndLast }
}
}
@ -472,12 +462,63 @@ impl Manager {
backtrace: &[],
async_errors: 0
}).write_to(&mut writer) {
Ok(_) => self.session.last_exception = Some(Sliceable::new(writer.into_inner())),
Ok(_) => self.session.last_exception = Some(Sliceable::new(0, writer.into_inner())),
Err(_) => error!("Error writing exception data")
}
}
pub fn process_kern_requests(&mut self, rank: u8) {
pub fn ddma_finished(&mut self, error: u8, channel: u32, timestamp: u64) {
if let KernelState::DmaAwait { .. } = self.session.kernel_state {
kern_send(&kern::DmaAwaitRemoteReply {
timeout: false, error: error, channel: channel, timestamp: timestamp
}).unwrap();
self.session.kernel_state = KernelState::Running;
}
}
pub fn ddma_nack(&mut self) {
// for simplicity treat it as a timeout for now...
if let KernelState::DmaAwait { .. } = self.session.kernel_state {
kern_send(&kern::DmaAwaitRemoteReply {
timeout: true, error: 0, channel: 0, timestamp: 0
}).unwrap();
self.session.kernel_state = KernelState::Running;
}
}
pub fn ddma_remote_uploaded(&mut self, succeeded: bool) {
if let KernelState::DmaUploading { .. } = self.session.kernel_state {
if succeeded {
self.session.kernel_state = KernelState::Running;
kern_acknowledge().unwrap();
} else {
self.stop();
self.runtime_exception(Error::DmaError(DmaError::UploadFail));
}
}
}
pub fn process_kern_requests(&mut self, router: &mut Router, routing_table: &RoutingTable, rank: u8, destination: u8, dma_manager: &mut DmaManager) {
macro_rules! finished {
($with_exception:expr) => {{ Some(SubkernelFinished {
source: self.session.source, id: self.current_id,
with_exception: $with_exception, exception_source: destination
}) }}
}
if let Some(subkernel_finished) = self.last_finished.take() {
info!("subkernel {} finished, with exception: {}", subkernel_finished.id, subkernel_finished.with_exception);
let pending = self.session.messages.pending_ids();
if pending.len() > 0 {
warn!("subkernel terminated with messages still pending: {:?}", pending);
}
router.route(drtioaux::Packet::SubkernelFinished {
destination: subkernel_finished.source, id: subkernel_finished.id,
with_exception: subkernel_finished.with_exception, exception_src: subkernel_finished.exception_source
}, &routing_table, rank, destination);
dma_manager.cleanup(router, rank, destination, routing_table);
}
if !self.is_running() {
return;
}
@ -490,39 +531,39 @@ impl Manager {
self.session.kernel_state = KernelState::Absent;
unsafe { self.cache.unborrow() }
self.session.last_exception = Some(exception);
self.last_finished = Some(SubkernelFinished { id: self.current_id, with_exception: true })
self.last_finished = finished!(true);
},
Err(e) => {
error!("Error while running processing external messages: {:?}", e);
self.stop();
self.runtime_exception(e);
self.last_finished = Some(SubkernelFinished { id: self.current_id, with_exception: true })
self.last_finished = finished!(true);
}
}
match self.process_kern_message(rank) {
match self.process_kern_message(router, routing_table, rank, destination, dma_manager) {
Ok(Some(with_exception)) => {
self.last_finished = Some(SubkernelFinished { id: self.current_id, with_exception: with_exception })
self.last_finished = finished!(with_exception)
},
Ok(None) | Err(Error::NoMessage) => (),
Err(e) => {
error!("Error while running kernel: {:?}", e);
self.stop();
self.runtime_exception(e);
self.last_finished = Some(SubkernelFinished { id: self.current_id, with_exception: true })
self.last_finished = finished!(true);
}
}
}
fn process_external_messages(&mut self) -> Result<(), Error> {
match &self.session.kernel_state {
KernelState::MsgAwait { max_time, tags } => {
if clock::get_ms() > *max_time {
KernelState::MsgAwait { id, max_time, tags } => {
if *max_time > 0 && clock::get_ms() > *max_time as u64 {
kern_send(&kern::SubkernelMsgRecvReply { status: kern::SubkernelStatus::Timeout, count: 0 })?;
self.session.kernel_state = KernelState::Running;
return Ok(())
}
if let Some(message) = self.session.messages.get_incoming() {
if let Some(message) = self.session.messages.get_incoming(*id) {
kern_send(&kern::SubkernelMsgRecvReply { status: kern::SubkernelStatus::NoError, count: message.count })?;
let tags = tags.clone();
self.session.kernel_state = KernelState::Running;
@ -539,16 +580,88 @@ impl Manager {
Err(Error::AwaitingMessage)
}
},
KernelState::SubkernelAwaitFinish { max_time, id } => {
if *max_time > 0 && clock::get_ms() > *max_time as u64 {
kern_send(&kern::SubkernelAwaitFinishReply { status: kern::SubkernelStatus::Timeout })?;
self.session.kernel_state = KernelState::Running;
} else {
let mut i = 0;
for status in &self.session.subkernels_finished {
if *status == *id {
kern_send(&kern::SubkernelAwaitFinishReply { status: kern::SubkernelStatus::NoError })?;
self.session.kernel_state = KernelState::Running;
self.session.subkernels_finished.swap_remove(i);
break;
}
i += 1;
}
}
Ok(())
}
KernelState::DmaAwait { max_time } => {
if clock::get_ms() > *max_time {
kern_send(&kern::DmaAwaitRemoteReply { timeout: true, error: 0, channel: 0, timestamp: 0 })?;
self.session.kernel_state = KernelState::Running;
}
// ddma_finished() and nack() cover the other cases
Ok(())
}
KernelState::DmaUploading { max_time } => {
if clock::get_ms() > *max_time {
unexpected!("DMAError: Timed out sending traces to remote");
}
Ok(())
}
_ => Ok(())
}
}
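// Editor's illustration (not part of the patch): the MsgAwait and SubkernelAwaitFinish
// states above share a timeout convention in which a non-positive max_time means "no timeout".
// A minimal sketch of that check, assuming clock::get_ms() as the millisecond time base:
fn timed_out(max_time: i64) -> bool {
    // non-positive max_time means "wait forever"
    max_time > 0 && clock::get_ms() > max_time as u64
}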
fn process_kern_message(&mut self, rank: u8) -> Result<Option<bool>, Error> {
pub fn subkernel_load_run_reply(&mut self, succeeded: bool, self_destination: u8) {
if self.session.kernel_state == KernelState::SubkernelAwaitLoad {
if let Err(e) = kern_send(&kern::SubkernelLoadRunReply { succeeded: succeeded }) {
self.stop();
self.runtime_exception(e);
self.last_finished = Some(SubkernelFinished {
source: self.session.source, id: self.current_id,
with_exception: true, exception_source: self_destination
})
} else {
self.session.kernel_state = KernelState::Running;
}
} else {
warn!("received unsolicited SubkernelLoadRunReply");
}
}
pub fn remote_subkernel_finished(&mut self, id: u32, with_exception: bool, exception_source: u8) {
if with_exception {
unsafe { kernel_cpu::stop() }
self.session.kernel_state = KernelState::Absent;
unsafe { self.cache.unborrow() }
self.last_finished = Some(SubkernelFinished {
source: self.session.source, id: self.current_id,
with_exception: true, exception_source: exception_source
})
} else {
self.session.subkernels_finished.push(id);
}
}
fn process_kern_message(&mut self, router: &mut Router,
routing_table: &RoutingTable,
rank: u8, destination: u8,
dma_manager: &mut DmaManager
) -> Result<Option<bool>, Error> {
// returns Ok(Some(with_exception)) when the kernel finishes,
// Ok(None) if it is still running
kern_recv(|request| {
match (request, &self.session.kernel_state) {
(&kern::LoadReply(_), KernelState::Loaded) => {
(&kern::LoadReply(_), KernelState::Loaded) |
(_, KernelState::DmaUploading { .. }) |
(_, KernelState::DmaAwait { .. }) |
(_, KernelState::MsgSending) |
(_, KernelState::SubkernelAwaitLoad) |
(_, KernelState::SubkernelAwaitFinish { .. }) => {
// We're standing by; ignore the message.
return Ok(None)
}
@ -559,7 +672,7 @@ impl Manager {
},
}
if process_kern_hwreq(request, rank)? {
if process_kern_hwreq(request, destination)? {
return Ok(None)
}
@ -613,19 +726,93 @@ impl Manager {
return Ok(Some(true))
}
&kern::SubkernelMsgSend { id: _, count, tag, data } => {
self.session.messages.accept_outgoing(count, tag, data)?;
&kern::DmaRecordStart(name) => {
dma_manager.record_start(name);
kern_acknowledge()
}
&kern::DmaRecordAppend(data) => {
dma_manager.record_append(data);
kern_acknowledge()
}
&kern::DmaRecordStop { duration, enable_ddma: _ } => {
// ddma is always used on satellites
if let Ok(id) = dma_manager.record_stop(duration, destination) {
let remote_count = dma_manager.upload_traces(id, router, rank, destination, routing_table)?;
if remote_count > 0 {
let max_time = clock::get_ms() + 10_000 as u64;
self.session.kernel_state = KernelState::DmaUploading { max_time: max_time };
Ok(())
} else {
kern_acknowledge()
}
} else {
unexpected!("DMAError: found an unsupported call to RTIO devices on master")
}
}
&kern::DmaEraseRequest { name } => {
dma_manager.erase_name(name, router, rank, destination, routing_table);
kern_acknowledge()
}
&kern::DmaRetrieveRequest { name } => {
dma_manager.with_trace(destination, name, |trace, duration| {
kern_send(&kern::DmaRetrieveReply {
trace: trace,
duration: duration,
uses_ddma: true,
})
})
}
&kern::DmaStartRemoteRequest { id, timestamp } => {
let max_time = clock::get_ms() + 10_000 as u64;
self.session.kernel_state = KernelState::DmaAwait { max_time: max_time };
dma_manager.playback_remote(id as u32, timestamp as u64, router, rank, destination, routing_table)?;
dma_manager.playback(destination, id as u32, timestamp as u64)?;
Ok(())
}
&kern::SubkernelMsgSend { id, destination: msg_dest, count, tag, data } => {
let message_destination;
let message_id;
if let Some(dest) = msg_dest {
message_destination = dest;
message_id = id;
} else {
// return message: send it back to the source
message_destination = self.session.source;
message_id = self.current_id;
}
self.session.messages.accept_outgoing(message_id, destination,
message_destination, count, tag, data,
routing_table, rank, router)?;
// acknowledge after the message is sent
self.session.kernel_state = KernelState::MsgSending;
Ok(())
}
&kern::SubkernelMsgRecvRequest { id: _, timeout, tags } => {
let max_time = clock::get_ms() + timeout as u64;
self.session.kernel_state = KernelState::MsgAwait { max_time: max_time, tags: tags.to_vec() };
&kern::SubkernelMsgRecvRequest { id, timeout, tags } => {
// negative timeout value means no timeout
let max_time = if timeout > 0 { clock::get_ms() as i64 + timeout } else { timeout };
// ID equal to -1 indicates wildcard for receiving arguments
let id = if id == -1 { self.current_id } else { id as u32 };
self.session.kernel_state = KernelState::MsgAwait {
id: id, max_time: max_time, tags: tags.to_vec() };
Ok(())
},
&kern::SubkernelLoadRunRequest { id, destination: sk_destination, run } => {
self.session.kernel_state = KernelState::SubkernelAwaitLoad;
router.route(drtioaux::Packet::SubkernelLoadRunRequest {
source: destination, destination: sk_destination, id: id, run: run
}, routing_table, rank, destination);
Ok(())
}
&kern::SubkernelAwaitFinishRequest{ id, timeout } => {
let max_time = if timeout > 0 { clock::get_ms() as i64 + timeout } else { timeout };
self.session.kernel_state = KernelState::SubkernelAwaitFinish { max_time: max_time, id: id };
Ok(())
}
request => unexpected!("unexpected request {:?} from kernel CPU", request)
}.and(Ok(None))
})
@ -699,7 +886,7 @@ fn slice_kernel_exception(exceptions: &[Option<eh_artiq::Exception>],
async_errors: 0
}).write_to(&mut writer) {
// save last exception data to be received by master
Ok(_) => Ok(Sliceable::new(writer.into_inner())),
Ok(_) => Ok(Sliceable::new(0, writer.into_inner())),
Err(_) => Err(Error::SubkernelIoError)
}
}
@ -759,7 +946,7 @@ fn pass_message_to_kernel(message: &Message, tags: &[u8]) -> Result<(), Error> {
Ok(())
}
fn process_kern_hwreq(request: &kern::Message, rank: u8) -> Result<bool, Error> {
fn process_kern_hwreq(request: &kern::Message, self_destination: u8) -> Result<bool, Error> {
match request {
&kern::RtioInitRequest => {
unsafe {
@ -774,7 +961,7 @@ fn process_kern_hwreq(request: &kern::Message, rank: u8) -> Result<bool, Error>
// only local destination is considered "up"
// no access to other DRTIO destinations
kern_send(&kern::RtioDestinationStatusReply {
up: destination == rank })
up: destination == self_destination })
}
&kern::I2cStartRequest { busno } => {


@ -32,6 +32,7 @@ use analyzer::Analyzer;
static mut ALLOC: alloc_list::ListAlloc = alloc_list::EMPTY;
mod repeater;
mod routing;
mod dma;
mod analyzer;
mod kernel;
@ -65,6 +66,13 @@ fn drtiosat_tsc_loaded() -> bool {
}
}
fn drtiosat_async_ready() {
unsafe {
csr::drtiosat::async_messages_ready_write(1);
}
}
#[derive(Clone, Copy)]
pub enum RtioMaster {
Drtio,
Dma,
@ -82,6 +90,16 @@ pub fn cricon_select(master: RtioMaster) {
}
}
pub fn cricon_read() -> RtioMaster {
let val = unsafe { csr::cri_con::selected_read() };
match val {
0 => RtioMaster::Drtio,
1 => RtioMaster::Dma,
2 => RtioMaster::Kernel,
_ => unreachable!()
}
}
#[cfg(has_drtio_routing)]
macro_rules! forward {
($routing_table:expr, $destination:expr, $rank:expr, $repeaters:expr, $packet:expr) => {{
@ -89,7 +107,14 @@ macro_rules! forward {
if hop != 0 {
let repno = (hop - 1) as usize;
if repno < $repeaters.len() {
if $packet.expects_response() {
return $repeaters[repno].aux_forward($packet);
} else {
let res = $repeaters[repno].aux_send($packet);
// allow the satellite to parse the packet before the next one is sent
clock::spin_us(10_000);
return res;
}
} else {
return Err(drtioaux::Error::RoutingError);
}
@ -103,8 +128,9 @@ macro_rules! forward {
}
fn process_aux_packet(dmamgr: &mut DmaManager, analyzer: &mut Analyzer, kernelmgr: &mut KernelManager,
_repeaters: &mut [repeater::Repeater], _routing_table: &mut drtio_routing::RoutingTable, _rank: &mut u8,
packet: drtioaux::Packet) -> Result<(), drtioaux::Error<!>> {
_repeaters: &mut [repeater::Repeater], _routing_table: &mut drtio_routing::RoutingTable, rank: &mut u8,
router: &mut routing::Router, self_destination: &mut u8, packet: drtioaux::Packet
) -> Result<(), drtioaux::Error<!>> {
// In the code below, *_chan_sel_write takes an u8 if there are fewer than 256 channels,
// and u16 otherwise; hence the `as _` conversion.
match packet {
@ -125,29 +151,12 @@ fn process_aux_packet(dmamgr: &mut DmaManager, analyzer: &mut Analyzer, kernelmg
drtioaux::Packet::DestinationStatusRequest { destination } => {
#[cfg(has_drtio_routing)]
let hop = _routing_table.0[destination as usize][*_rank as usize];
let hop = _routing_table.0[destination as usize][*rank as usize];
#[cfg(not(has_drtio_routing))]
let hop = 0;
if hop == 0 {
// async messages
if let Some(status) = dmamgr.get_status() {
info!("playback done, error: {}, channel: {}, timestamp: {}", status.error, status.channel, status.timestamp);
drtioaux::send(0, &drtioaux::Packet::DmaPlaybackStatus {
destination: destination, id: status.id, error: status.error, channel: status.channel, timestamp: status.timestamp })?;
} else if let Some(subkernel_finished) = kernelmgr.get_last_finished() {
info!("subkernel {} finished, with exception: {}", subkernel_finished.id, subkernel_finished.with_exception);
drtioaux::send(0, &drtioaux::Packet::SubkernelFinished {
id: subkernel_finished.id, with_exception: subkernel_finished.with_exception
})?;
} else if kernelmgr.message_is_ready() {
let mut data_slice: [u8; MASTER_PAYLOAD_MAX_SIZE] = [0; MASTER_PAYLOAD_MAX_SIZE];
let meta = kernelmgr.message_get_slice(&mut data_slice).unwrap();
drtioaux::send(0, &drtioaux::Packet::SubkernelMessage {
destination: destination, id: kernelmgr.get_current_id().unwrap(),
status: meta.status, length: meta.len as u16, data: data_slice
})?;
} else {
*self_destination = destination;
let errors;
unsafe {
errors = csr::drtiosat::rtio_error_read();
@ -181,7 +190,6 @@ fn process_aux_packet(dmamgr: &mut DmaManager, analyzer: &mut Analyzer, kernelmg
drtioaux::send(0, &drtioaux::Packet::DestinationOkReply)?;
}
}
}
#[cfg(has_drtio_routing)]
{
@ -204,7 +212,6 @@ fn process_aux_packet(dmamgr: &mut DmaManager, analyzer: &mut Analyzer, kernelmg
}
}
}
Ok(())
}
@ -219,18 +226,18 @@ fn process_aux_packet(dmamgr: &mut DmaManager, analyzer: &mut Analyzer, kernelmg
drtioaux::send(0, &drtioaux::Packet::RoutingAck)
}
#[cfg(has_drtio_routing)]
drtioaux::Packet::RoutingSetRank { rank } => {
*_rank = rank;
drtio_routing::interconnect_enable_all(_routing_table, rank);
drtioaux::Packet::RoutingSetRank { rank: new_rank } => {
*rank = new_rank;
drtio_routing::interconnect_enable_all(_routing_table, new_rank);
let rep_rank = rank + 1;
let rep_rank = new_rank + 1;
for rep in _repeaters.iter() {
if let Err(e) = rep.set_rank(rep_rank) {
error!("failed to set rank ({})", e);
}
}
info!("rank: {}", rank);
info!("rank: {}", new_rank);
info!("routing table: {}", _routing_table);
drtioaux::send(0, &drtioaux::Packet::RoutingAck)
@ -245,8 +252,14 @@ fn process_aux_packet(dmamgr: &mut DmaManager, analyzer: &mut Analyzer, kernelmg
drtioaux::send(0, &drtioaux::Packet::RoutingAck)
}
drtioaux::Packet::RoutingRetrievePackets => {
let packet = router.get_upstream_packet().or(
Some(drtioaux::Packet::RoutingNoPackets)).unwrap();
drtioaux::send(0, &packet)
}
drtioaux::Packet::MonitorRequest { destination: _destination, channel, probe } => {
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
let value;
#[cfg(has_rtio_moninj)]
unsafe {
@ -263,7 +276,7 @@ fn process_aux_packet(dmamgr: &mut DmaManager, analyzer: &mut Analyzer, kernelmg
drtioaux::send(0, &reply)
},
drtioaux::Packet::InjectionRequest { destination: _destination, channel, overrd, value } => {
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
#[cfg(has_rtio_moninj)]
unsafe {
csr::rtio_moninj::inj_chan_sel_write(channel as _);
@ -273,7 +286,7 @@ fn process_aux_packet(dmamgr: &mut DmaManager, analyzer: &mut Analyzer, kernelmg
Ok(())
},
drtioaux::Packet::InjectionStatusRequest { destination: _destination, channel, overrd } => {
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
let value;
#[cfg(has_rtio_moninj)]
unsafe {
@ -289,22 +302,22 @@ fn process_aux_packet(dmamgr: &mut DmaManager, analyzer: &mut Analyzer, kernelmg
},
drtioaux::Packet::I2cStartRequest { destination: _destination, busno } => {
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
let succeeded = i2c::start(busno).is_ok();
drtioaux::send(0, &drtioaux::Packet::I2cBasicReply { succeeded: succeeded })
}
drtioaux::Packet::I2cRestartRequest { destination: _destination, busno } => {
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
let succeeded = i2c::restart(busno).is_ok();
drtioaux::send(0, &drtioaux::Packet::I2cBasicReply { succeeded: succeeded })
}
drtioaux::Packet::I2cStopRequest { destination: _destination, busno } => {
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
let succeeded = i2c::stop(busno).is_ok();
drtioaux::send(0, &drtioaux::Packet::I2cBasicReply { succeeded: succeeded })
}
drtioaux::Packet::I2cWriteRequest { destination: _destination, busno, data } => {
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
match i2c::write(busno, data) {
Ok(ack) => drtioaux::send(0,
&drtioaux::Packet::I2cWriteReply { succeeded: true, ack: ack }),
@ -313,7 +326,7 @@ fn process_aux_packet(dmamgr: &mut DmaManager, analyzer: &mut Analyzer, kernelmg
}
}
drtioaux::Packet::I2cReadRequest { destination: _destination, busno, ack } => {
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
match i2c::read(busno, ack) {
Ok(data) => drtioaux::send(0,
&drtioaux::Packet::I2cReadReply { succeeded: true, data: data }),
@ -322,25 +335,25 @@ fn process_aux_packet(dmamgr: &mut DmaManager, analyzer: &mut Analyzer, kernelmg
}
}
drtioaux::Packet::I2cSwitchSelectRequest { destination: _destination, busno, address, mask } => {
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
let succeeded = i2c::switch_select(busno, address, mask).is_ok();
drtioaux::send(0, &drtioaux::Packet::I2cBasicReply { succeeded: succeeded })
}
drtioaux::Packet::SpiSetConfigRequest { destination: _destination, busno, flags, length, div, cs } => {
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
let succeeded = spi::set_config(busno, flags, length, div, cs).is_ok();
drtioaux::send(0,
&drtioaux::Packet::SpiBasicReply { succeeded: succeeded })
},
drtioaux::Packet::SpiWriteRequest { destination: _destination, busno, data } => {
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
let succeeded = spi::write(busno, data).is_ok();
drtioaux::send(0,
&drtioaux::Packet::SpiBasicReply { succeeded: succeeded })
}
drtioaux::Packet::SpiReadRequest { destination: _destination, busno } => {
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
match spi::read(busno) {
Ok(data) => drtioaux::send(0,
&drtioaux::Packet::SpiReadReply { succeeded: true, data: data }),
@ -350,7 +363,7 @@ fn process_aux_packet(dmamgr: &mut DmaManager, analyzer: &mut Analyzer, kernelmg
}
drtioaux::Packet::AnalyzerHeaderRequest { destination: _destination } => {
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
let header = analyzer.get_header();
drtioaux::send(0, &drtioaux::Packet::AnalyzerHeader {
total_byte_count: header.total_byte_count,
@ -360,7 +373,7 @@ fn process_aux_packet(dmamgr: &mut DmaManager, analyzer: &mut Analyzer, kernelmg
}
drtioaux::Packet::AnalyzerDataRequest { destination: _destination } => {
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
let mut data_slice: [u8; SAT_PAYLOAD_MAX_SIZE] = [0; SAT_PAYLOAD_MAX_SIZE];
let meta = analyzer.get_data(&mut data_slice);
drtioaux::send(0, &drtioaux::Packet::AnalyzerData {
@ -370,34 +383,56 @@ fn process_aux_packet(dmamgr: &mut DmaManager, analyzer: &mut Analyzer, kernelmg
})
}
drtioaux::Packet::DmaAddTraceRequest { destination: _destination, id, status, length, trace } => {
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
let succeeded = dmamgr.add(id, status, &trace, length as usize).is_ok();
drtioaux::send(0,
&drtioaux::Packet::DmaAddTraceReply { succeeded: succeeded })
drtioaux::Packet::DmaAddTraceRequest { source, destination, id, status, length, trace } => {
forward!(_routing_table, destination, *rank, _repeaters, &packet);
*self_destination = destination;
let succeeded = dmamgr.add(source, id, status, &trace, length as usize).is_ok();
router.send(drtioaux::Packet::DmaAddTraceReply {
source: *self_destination, destination: source, id: id, succeeded: succeeded
}, _routing_table, *rank, *self_destination)
}
drtioaux::Packet::DmaRemoveTraceRequest { destination: _destination, id } => {
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
let succeeded = dmamgr.erase(id).is_ok();
drtioaux::send(0,
&drtioaux::Packet::DmaRemoveTraceReply { succeeded: succeeded })
drtioaux::Packet::DmaAddTraceReply { source, destination: _destination, id, succeeded } => {
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
dmamgr.ack_upload(kernelmgr, source, id, succeeded, router, *rank, *self_destination, _routing_table);
Ok(())
}
drtioaux::Packet::DmaPlaybackRequest { destination: _destination, id, timestamp } => {
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
drtioaux::Packet::DmaRemoveTraceRequest { source, destination: _destination, id } => {
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
let succeeded = dmamgr.erase(source, id).is_ok();
router.send(drtioaux::Packet::DmaRemoveTraceReply {
destination: source, succeeded: succeeded
}, _routing_table, *rank, *self_destination)
}
drtioaux::Packet::DmaPlaybackRequest { source, destination: _destination, id, timestamp } => {
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
// no DMA with a running kernel
let succeeded = !kernelmgr.is_running() && dmamgr.playback(id, timestamp).is_ok();
drtioaux::send(0,
&drtioaux::Packet::DmaPlaybackReply { succeeded: succeeded })
let succeeded = !kernelmgr.is_running() && dmamgr.playback(source, id, timestamp).is_ok();
router.send(drtioaux::Packet::DmaPlaybackReply {
destination: source, succeeded: succeeded
}, _routing_table, *rank, *self_destination)
}
drtioaux::Packet::DmaPlaybackReply { destination: _destination, succeeded } => {
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
if !succeeded {
kernelmgr.ddma_nack();
}
Ok(())
}
drtioaux::Packet::DmaPlaybackStatus { source: _, destination: _destination, id, error, channel, timestamp } => {
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
dmamgr.remote_finished(kernelmgr, id, error, channel, timestamp);
Ok(())
}
drtioaux::Packet::SubkernelAddDataRequest { destination: _destination, id, status, length, data } => {
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
drtioaux::Packet::SubkernelAddDataRequest { destination, id, status, length, data } => {
forward!(_routing_table, destination, *rank, _repeaters, &packet);
*self_destination = destination;
let succeeded = kernelmgr.add(id, status, &data, length as usize).is_ok();
drtioaux::send(0,
&drtioaux::Packet::SubkernelAddDataReply { succeeded: succeeded })
}
drtioaux::Packet::SubkernelLoadRunRequest { destination: _destination, id, run } => {
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
drtioaux::Packet::SubkernelLoadRunRequest { source, destination: _destination, id, run } => {
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
let mut succeeded = kernelmgr.load(id).is_ok();
// allow preloading a kernel with delayed run
if run {
@ -405,14 +440,27 @@ fn process_aux_packet(dmamgr: &mut DmaManager, analyzer: &mut Analyzer, kernelmg
// cannot run kernel while DDMA is running
succeeded = false;
} else {
succeeded |= kernelmgr.run(id).is_ok();
succeeded |= kernelmgr.run(source, id).is_ok();
}
}
drtioaux::send(0,
&drtioaux::Packet::SubkernelLoadRunReply { succeeded: succeeded })
router.send(drtioaux::Packet::SubkernelLoadRunReply {
destination: source, succeeded: succeeded
},
_routing_table, *rank, *self_destination)
}
drtioaux::Packet::SubkernelLoadRunReply { destination: _destination, succeeded } => {
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
// received when a local subkernel has started another (remote) subkernel
kernelmgr.subkernel_load_run_reply(succeeded, *self_destination);
Ok(())
}
drtioaux::Packet::SubkernelFinished { destination: _destination, id, with_exception, exception_src } => {
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
kernelmgr.remote_subkernel_finished(id, with_exception, exception_src);
Ok(())
}
drtioaux::Packet::SubkernelExceptionRequest { destination: _destination } => {
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
let mut data_slice: [u8; SAT_PAYLOAD_MAX_SIZE] = [0; SAT_PAYLOAD_MAX_SIZE];
let meta = kernelmgr.exception_get_slice(&mut data_slice);
drtioaux::send(0, &drtioaux::Packet::SubkernelException {
@ -421,22 +469,23 @@ fn process_aux_packet(dmamgr: &mut DmaManager, analyzer: &mut Analyzer, kernelmg
data: data_slice,
})
}
drtioaux::Packet::SubkernelMessage { destination, id: _id, status, length, data } => {
forward!(_routing_table, destination, *_rank, _repeaters, &packet);
kernelmgr.message_handle_incoming(status, length as usize, &data);
drtioaux::send(0, &drtioaux::Packet::SubkernelMessageAck {
destination: destination
})
drtioaux::Packet::SubkernelMessage { source, destination: _destination, id, status, length, data } => {
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
kernelmgr.message_handle_incoming(status, length as usize, id, &data);
router.send(drtioaux::Packet::SubkernelMessageAck {
destination: source
}, _routing_table, *rank, *self_destination)
}
drtioaux::Packet::SubkernelMessageAck { destination: _destination } => {
forward!(_routing_table, _destination, *_rank, _repeaters, &packet);
forward!(_routing_table, _destination, *rank, _repeaters, &packet);
if kernelmgr.message_ack_slice() {
let mut data_slice: [u8; MASTER_PAYLOAD_MAX_SIZE] = [0; MASTER_PAYLOAD_MAX_SIZE];
if let Some(meta) = kernelmgr.message_get_slice(&mut data_slice) {
drtioaux::send(0, &drtioaux::Packet::SubkernelMessage {
destination: *_rank, id: kernelmgr.get_current_id().unwrap(),
// route rather than send immediately, as ACKs are not the beginning of a transaction
router.route(drtioaux::Packet::SubkernelMessage {
source: *self_destination, destination: meta.destination, id: kernelmgr.get_current_id().unwrap(),
status: meta.status, length: meta.len as u16, data: data_slice
})?
}, _routing_table, *rank, *self_destination);
} else {
error!("Error receiving message slice");
}
@ -453,18 +502,19 @@ fn process_aux_packet(dmamgr: &mut DmaManager, analyzer: &mut Analyzer, kernelmg
fn process_aux_packets(dma_manager: &mut DmaManager, analyzer: &mut Analyzer,
kernelmgr: &mut KernelManager, repeaters: &mut [repeater::Repeater],
routing_table: &mut drtio_routing::RoutingTable, rank: &mut u8) {
routing_table: &mut drtio_routing::RoutingTable, rank: &mut u8, router: &mut routing::Router,
destination: &mut u8) {
let result =
drtioaux::recv(0).and_then(|packet| {
if let Some(packet) = packet {
process_aux_packet(dma_manager, analyzer, kernelmgr, repeaters, routing_table, rank, packet)
if let Some(packet) = packet.or_else(|| router.get_local_packet()) {
process_aux_packet(dma_manager, analyzer, kernelmgr,
repeaters, routing_table, rank, router, destination, packet)
} else {
Ok(())
}
});
match result {
Ok(()) => (),
Err(e) => warn!("aux packet error ({})", e)
if let Err(e) = result {
warn!("aux packet error ({})", e);
}
}
@ -670,6 +720,7 @@ pub extern fn main() -> i32 {
}
let mut routing_table = drtio_routing::RoutingTable::default_empty();
let mut rank = 1;
let mut destination = 1;
let mut hardware_tick_ts = 0;
@ -677,10 +728,12 @@ pub extern fn main() -> i32 {
ad9117::init().expect("AD9117 initialization failed");
loop {
let mut router = routing::Router::new();
while !drtiosat_link_rx_up() {
drtiosat_process_errors();
for rep in repeaters.iter_mut() {
rep.service(&routing_table, rank);
rep.service(&routing_table, rank, destination, &mut router);
}
#[cfg(all(soc_platform = "kasli", hw_rev = "v2.0"))]
{
@ -714,10 +767,10 @@ pub extern fn main() -> i32 {
while drtiosat_link_rx_up() {
drtiosat_process_errors();
process_aux_packets(&mut dma_manager, &mut analyzer,
&mut kernelmgr, &mut repeaters,
&mut routing_table, &mut rank);
&mut kernelmgr, &mut repeaters, &mut routing_table,
&mut rank, &mut router, &mut destination);
for rep in repeaters.iter_mut() {
rep.service(&routing_table, rank);
rep.service(&routing_table, rank, destination, &mut router);
}
#[cfg(all(soc_platform = "kasli", hw_rev = "v2.0"))]
{
@ -738,7 +791,26 @@ pub extern fn main() -> i32 {
error!("aux packet error: {}", e);
}
}
kernelmgr.process_kern_requests(rank);
if let Some(status) = dma_manager.get_status() {
info!("playback done, error: {}, channel: {}, timestamp: {}", status.error, status.channel, status.timestamp);
router.route(drtioaux::Packet::DmaPlaybackStatus {
source: destination, destination: status.source, id: status.id,
error: status.error, channel: status.channel, timestamp: status.timestamp
}, &routing_table, rank, destination);
}
kernelmgr.process_kern_requests(&mut router, &routing_table, rank, destination, &mut dma_manager);
#[cfg(has_drtio_routing)]
if let Some((repno, packet)) = router.get_downstream_packet() {
if let Err(e) = repeaters[repno].aux_send(&packet) {
warn!("[REP#{}] Error when sending packet to satellite ({:?})", repno, e)
}
}
if router.any_upstream_waiting() {
drtiosat_async_ready();
}
}
drtiosat_reset_phy(true);


@ -1,6 +1,7 @@
use board_artiq::{drtioaux, drtio_routing};
#[cfg(has_drtio_routing)]
use board_misoc::{csr, clock};
use routing::Router;
#[cfg(has_drtio_routing)]
fn rep_link_rx_up(repno: u8) -> bool {
@ -48,7 +49,7 @@ impl Repeater {
self.state == RepeaterState::Up
}
pub fn service(&mut self, routing_table: &drtio_routing::RoutingTable, rank: u8) {
pub fn service(&mut self, routing_table: &drtio_routing::RoutingTable, rank: u8, destination: u8, router: &mut Router) {
self.process_local_errors();
match self.state {
@ -111,6 +112,11 @@ impl Repeater {
info!("[REP#{}] link is down", self.repno);
self.state = RepeaterState::Down;
}
if self.async_messages_ready() {
if let Err(e) = self.handle_async(routing_table, rank, destination, router) {
warn!("[REP#{}] Error handling async messages ({})", self.repno, e);
}
}
}
RepeaterState::Failed => {
if !rep_link_rx_up(self.repno) {
@ -179,16 +185,42 @@ impl Repeater {
}
}
pub fn aux_forward(&self, request: &drtioaux::Packet) -> Result<(), drtioaux::Error<!>> {
if self.state != RepeaterState::Up {
return Err(drtioaux::Error::LinkDown);
fn async_messages_ready(&self) -> bool {
let async_rdy;
unsafe {
async_rdy = (csr::DRTIOREP[self.repno as usize].async_messages_ready_read)();
(csr::DRTIOREP[self.repno as usize].async_messages_ready_write)(0);
}
drtioaux::send(self.auxno, request).unwrap();
async_rdy == 1
}
fn handle_async(&self, routing_table: &drtio_routing::RoutingTable, rank: u8, self_destination: u8, router: &mut Router
) -> Result<(), drtioaux::Error<!>> {
loop {
drtioaux::send(self.auxno, &drtioaux::Packet::RoutingRetrievePackets).unwrap();
let reply = self.recv_aux_timeout(200)?;
match reply {
drtioaux::Packet::RoutingNoPackets => break,
packet => router.route(packet, routing_table, rank, self_destination)
}
}
Ok(())
}
pub fn aux_forward(&self, request: &drtioaux::Packet) -> Result<(), drtioaux::Error<!>> {
self.aux_send(request)?;
let reply = self.recv_aux_timeout(200)?;
drtioaux::send(0, &reply).unwrap();
Ok(())
}
pub fn aux_send(&self, request: &drtioaux::Packet) -> Result<(), drtioaux::Error<!>> {
if self.state != RepeaterState::Up {
return Err(drtioaux::Error::LinkDown);
}
drtioaux::send(self.auxno, request)
}
pub fn sync_tsc(&self) -> Result<(), drtioaux::Error<!>> {
if self.state != RepeaterState::Up {
return Ok(());
@ -199,7 +231,6 @@ impl Repeater {
(csr::DRTIOREP[repno].set_time_write)(1);
while (csr::DRTIOREP[repno].set_time_read)() == 1 {}
}
// TSCAck is the only aux packet that is sent spontaneously
// by the satellite, in response to a TSC set on the RT link.
let reply = self.recv_aux_timeout(10000)?;
@ -275,7 +306,7 @@ pub struct Repeater {
impl Repeater {
pub fn new(_repno: u8) -> Repeater { Repeater::default() }
pub fn service(&self, _routing_table: &drtio_routing::RoutingTable, _rank: u8) { }
pub fn service(&self, _routing_table: &drtio_routing::RoutingTable, _rank: u8, _destination: u8, _router: &mut Router) { }
pub fn sync_tsc(&self) -> Result<(), drtioaux::Error<!>> { Ok(()) }


@ -0,0 +1,184 @@
use alloc::{vec::Vec, collections::vec_deque::VecDeque};
use board_artiq::{drtioaux, drtio_routing};
#[cfg(has_drtio_routing)]
use board_misoc::csr;
use core::cmp::min;
use proto_artiq::drtioaux_proto::PayloadStatus;
use SAT_PAYLOAD_MAX_SIZE;
use MASTER_PAYLOAD_MAX_SIZE;
/* represents data that has to be sent with the aux protocol */
#[derive(Debug)]
pub struct Sliceable {
it: usize,
data: Vec<u8>,
destination: u8
}
pub struct SliceMeta {
pub destination: u8,
pub len: u16,
pub status: PayloadStatus
}
macro_rules! get_slice_fn {
( $name:tt, $size:expr ) => {
pub fn $name(&mut self, data_slice: &mut [u8; $size]) -> SliceMeta {
let first = self.it == 0;
let len = min($size, self.data.len() - self.it);
let last = self.it + len == self.data.len();
let status = PayloadStatus::from_status(first, last);
data_slice[..len].clone_from_slice(&self.data[self.it..self.it+len]);
self.it += len;
SliceMeta {
destination: self.destination,
len: len as u16,
status: status
}
}
};
}
impl Sliceable {
pub fn new(destination: u8, data: Vec<u8>) -> Sliceable {
Sliceable {
it: 0,
data: data,
destination: destination
}
}
pub fn at_end(&self) -> bool {
self.it == self.data.len()
}
pub fn extend(&mut self, data: &[u8]) {
self.data.extend(data);
}
get_slice_fn!(get_slice_sat, SAT_PAYLOAD_MAX_SIZE);
get_slice_fn!(get_slice_master, MASTER_PAYLOAD_MAX_SIZE);
}
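// Editor's illustration (not part of the patch): how a Sliceable is typically drained
// into MASTER_PAYLOAD_MAX_SIZE chunks before going over the aux protocol.
// `forward_in_chunks` and `send_chunk` are hypothetical names used only for this sketch.
fn forward_in_chunks(destination: u8, payload: &[u8]) {
    let mut sliceable = Sliceable::new(destination, payload.to_vec());
    while !sliceable.at_end() {
        let mut data_slice: [u8; MASTER_PAYLOAD_MAX_SIZE] = [0; MASTER_PAYLOAD_MAX_SIZE];
        let meta = sliceable.get_slice_master(&mut data_slice);
        // meta.status carries the first/last flags so the receiver can reassemble the payload
        send_chunk(meta.destination, meta.status, meta.len, &data_slice);
    }
}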
// Packets from downstream (further satellites) are received and routed appropriately.
// They are passed downstream (within the subtree) as soon as possible, or queued upstream,
// and the upstream node is notified about pending packets.
// For rank 1 satellites (connected to the master), these packets are passed as an answer to DestinationStatusRequest;
// for higher ranks, the upstream satellite, after getting a notification, transacts with downstream to retrieve the pending packets.
// The forward! macro is not deprecated, as routable packets are only those that can originate
// from both master and satellite, e.g. DDMA and Subkernel.
pub struct Router {
upstream_queue: VecDeque<drtioaux::Packet>,
local_queue: VecDeque<drtioaux::Packet>,
#[cfg(has_drtio_routing)]
downstream_queue: VecDeque<(usize, drtioaux::Packet)>,
upstream_notified: bool,
}
impl Router {
pub fn new() -> Router {
Router {
upstream_queue: VecDeque::new(),
local_queue: VecDeque::new(),
#[cfg(has_drtio_routing)]
downstream_queue: VecDeque::new(),
upstream_notified: false,
}
}
// called by local sources (DDMA, kernel) and by repeaters on receiving async data
// messages are always buffered for both upstream and downstream
pub fn route(&mut self, packet: drtioaux::Packet,
_routing_table: &drtio_routing::RoutingTable, _rank: u8,
self_destination: u8
) {
let destination = packet.routable_destination();
#[cfg(has_drtio_routing)]
{
if let Some(destination) = destination {
let hop = _routing_table.0[destination as usize][_rank as usize] as usize;
if destination == self_destination {
self.local_queue.push_back(packet);
} else if hop > 0 && hop < csr::DRTIOREP.len() {
let repno = (hop - 1) as usize;
self.downstream_queue.push_back((repno, packet));
} else {
self.upstream_queue.push_back(packet);
}
} else {
error!("Received an unroutable packet: {:?}", packet);
}
}
#[cfg(not(has_drtio_routing))]
{
if destination == Some(self_destination) {
self.local_queue.push_back(packet);
} else {
self.upstream_queue.push_back(packet);
}
}
}
// Sends a packet to the required destination, routing it if necessary
pub fn send(&mut self, packet: drtioaux::Packet,
_routing_table: &drtio_routing::RoutingTable,
_rank: u8, _destination: u8
) -> Result<(), drtioaux::Error<!>> {
#[cfg(has_drtio_routing)]
{
let destination = packet.routable_destination();
if let Some(destination) = destination {
let hop = _routing_table.0[destination as usize][_rank as usize] as usize;
if destination == 0 {
// the master expects a response immediately
drtioaux::send(0, &packet)?;
} else if !(hop > 0 && hop < csr::DRTIOREP.len()) {
// higher rank can wait
self.upstream_queue.push_back(packet);
} else {
let repno = (hop - 1) as usize;
// the transaction will occur at the closest possible opportunity
self.downstream_queue.push_back((repno, packet));
}
Ok(())
} else {
// packet not supported by the router; fall back to sending it directly
drtioaux::send(0, &packet)
}
}
#[cfg(not(has_drtio_routing))]
{
drtioaux::send(0, &packet)
}
}
pub fn any_upstream_waiting(&mut self) -> bool {
let empty = self.upstream_queue.is_empty();
if !empty && !self.upstream_notified {
self.upstream_notified = true; // so upstream will not get spammed with notifications
true
} else {
false
}
}
pub fn get_upstream_packet(&mut self) -> Option<drtioaux::Packet> {
let packet = self.upstream_queue.pop_front();
if packet.is_none() {
self.upstream_notified = false;
}
packet
}
#[cfg(has_drtio_routing)]
pub fn get_downstream_packet(&mut self) -> Option<(usize, drtioaux::Packet)> {
self.downstream_queue.pop_front()
}
pub fn get_local_packet(&mut self) -> Option<drtioaux::Packet> {
self.local_queue.pop_front()
}
}
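// Editor's usage sketch (not part of the patch), assuming a routing-enabled build
// (has_drtio_routing) and the names used in main() above (`repeaters`, `routing_table`,
// `rank`, `self_destination`); `packet` stands for any routable drtioaux::Packet.
let mut router = Router::new();
// a local source (e.g. a finished DDMA playback) queues an async packet
router.route(packet, &routing_table, rank, self_destination);
// packets destined for a subtree are handed to the matching repeater
if let Some((repno, packet)) = router.get_downstream_packet() {
    let _ = repeaters[repno].aux_send(&packet);
}
// upstream packets wait until the master polls with RoutingRetrievePackets;
// raise the async-ready flag so it knows something is pending
if router.any_upstream_waiting() {
    drtiosat_async_ready();
}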


@ -9,7 +9,7 @@ from sipyco.asyncio_tools import AsyncioServer, SignalHandler, atexit_register_c
from sipyco.pc_rpc import Server
from sipyco import common_args
from artiq.coredevice.comm_analyzer import get_analyzer_dump
from artiq.coredevice.comm_analyzer import get_analyzer_dump, ANALYZER_MAGIC
logger = logging.getLogger(__name__)
@ -24,6 +24,7 @@ class ProxyServer(AsyncioServer):
async def _handle_connection_cr(self, reader, writer):
try:
writer.write(ANALYZER_MAGIC)
queue = asyncio.Queue(self._queue_limit)
self._recipients.add(queue)
try:
@ -60,9 +61,7 @@ class ProxyControl:
self.distribute_cb(dump)
except:
logger.warning("Trigger failed:", exc_info=True)
return False
else:
return True
raise
def get_argparser():


@ -22,6 +22,7 @@ from sipyco.pc_rpc import Client
from sipyco.sync_struct import Subscriber
from sipyco.broadcast import Receiver
from sipyco import common_args, pyon
from sipyco.asyncio_tools import SignalHandler
from artiq.tools import (scale_from_metadata, short_format, parse_arguments,
parse_devarg_override)
@ -112,11 +113,25 @@ def get_argparser():
"del-dataset", help="delete a dataset")
parser_del_dataset.add_argument("name", help="name of the dataset")
parser_supply_interactive = subparsers.add_parser(
"supply-interactive", help="supply interactive arguments")
parser_supply_interactive.add_argument(
"rid", metavar="RID", type=int, help="RID of target experiment")
parser_supply_interactive.add_argument(
"arguments", metavar="ARGUMENTS", nargs="*",
help="interactive arguments")
parser_cancel_interactive = subparsers.add_parser(
"cancel-interactive", help="cancel interactive arguments")
parser_cancel_interactive.add_argument(
"rid", metavar="RID", type=int, help="RID of target experiment")
parser_show = subparsers.add_parser(
"show", help="show schedule, log, devices or datasets")
parser_show.add_argument(
"what", metavar="WHAT",
choices=["schedule", "log", "ccb", "devices", "datasets"],
choices=["schedule", "log", "ccb", "devices", "datasets",
"interactive-args"],
help="select object to show: %(choices)s")
subparsers.add_parser(
@ -135,8 +150,7 @@ def get_argparser():
"ls", help="list a directory on the master")
parser_ls.add_argument("directory", default="", nargs="?")
subparsers.add_parser(
"terminate", help="terminate the ARTIQ master")
subparsers.add_parser("terminate", help="terminate the ARTIQ master")
common_args.verbosity_args(parser)
return parser
@ -208,6 +222,15 @@ def _action_scan_devices(remote, args):
remote.scan()
def _action_supply_interactive(remote, args):
arguments = parse_arguments(args.arguments)
remote.supply(args.rid, arguments)
def _action_cancel_interactive(remote, args):
remote.cancel(args.rid)
def _action_scan_repository(remote, args):
if getattr(args, "async"):
remote.scan_repository_async(args.revision)
@ -274,17 +297,34 @@ def _show_datasets(datasets):
print(table)
def _show_interactive_args(interactive_args):
clear_screen()
table = PrettyTable(["RID", "Title", "Key", "Type", "Group", "Tooltip"])
for rid, input_request in sorted(interactive_args.items(), key=itemgetter(0)):
title = input_request["title"]
for key, procdesc, group, tooltip in input_request["arglist_desc"]:
table.add_row([rid, title, key, procdesc["ty"], group, tooltip])
print(table)
def _run_subscriber(host, port, subscriber):
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
signal_handler = SignalHandler()
signal_handler.setup()
try:
loop.run_until_complete(subscriber.connect(host, port))
try:
loop.run_until_complete(asyncio.wait_for(subscriber.receive_task,
None))
print("Connection to master lost")
_, pending = loop.run_until_complete(asyncio.wait(
[loop.create_task(signal_handler.wait_terminate()), subscriber.receive_task],
return_when=asyncio.FIRST_COMPLETED))
for task in pending:
task.cancel()
finally:
loop.run_until_complete(subscriber.close())
finally:
signal_handler.teardown()
finally:
loop.close()
@ -338,18 +378,22 @@ def main():
_show_dict(args, "devices", _show_devices)
elif args.what == "datasets":
_show_dict(args, "datasets", _show_datasets)
elif args.what == "interactive-args":
_show_dict(args, "interactive_args", _show_interactive_args)
else:
raise ValueError
else:
port = 3251 if args.port is None else args.port
target_name = {
"submit": "master_schedule",
"delete": "master_schedule",
"set_dataset": "master_dataset_db",
"del_dataset": "master_dataset_db",
"scan_devices": "master_device_db",
"scan_repository": "master_experiment_db",
"ls": "master_experiment_db",
"submit": "schedule",
"delete": "schedule",
"set_dataset": "dataset_db",
"del_dataset": "dataset_db",
"scan_devices": "device_db",
"supply_interactive": "interactive_arg_db",
"cancel_interactive": "interactive_arg_db",
"scan_repository": "experiment_db",
"ls": "experiment_db",
"terminate": "master_management",
}[action]
remote = Client(args.server, port, target_name)


@ -67,12 +67,21 @@ def main():
core.compile(exp.run, [exp_inst], {},
attribute_writeback=False, print_as_rpc=False)
subkernels = {}
for sid, subkernel_fn in object_map.subkernels().items():
destination, subkernel_library = core.compile_subkernel(
subkernels = object_map.subkernels()
compiled_subkernels = {}
while True:
new_subkernels = {}
for sid, subkernel_fn in subkernels.items():
if sid in compiled_subkernels.keys():
continue
destination, subkernel_library, embedding_map = core.compile_subkernel(
sid, subkernel_fn, object_map,
[exp_inst], subkernel_arg_types)
subkernels[sid] = (destination, subkernel_library)
[exp_inst], subkernel_arg_types, subkernels)
compiled_subkernels[sid] = (destination, subkernel_library)
new_subkernels.update(embedding_map.subkernels())
if new_subkernels == subkernels:
break
subkernels.update(new_subkernels)
except CompileError as error:
return
finally:
@ -107,7 +116,7 @@ def main():
tar.addfile(main_kernel_info, fileobj=main_kernel_fileobj)
# subkernels as "<sid> <destination>.elf"
for sid, (destination, subkernel_library) in subkernels.items():
for sid, (destination, subkernel_library) in compiled_subkernels.items():
subkernel_fileobj = io.BytesIO(subkernel_library)
subkernel_info = tarfile.TarInfo(name="{} {}.elf".format(sid, destination))
subkernel_info.size = len(subkernel_library)


@ -6,7 +6,6 @@ import atexit
import importlib
import os
import logging
import sys
from PyQt5 import QtCore, QtGui, QtWidgets
from qasync import QEventLoop
@ -15,13 +14,15 @@ from sipyco.pc_rpc import AsyncioClient, Client
from sipyco.broadcast import Receiver
from sipyco import common_args
from sipyco.asyncio_tools import atexit_register_coroutine
from sipyco.sync_struct import Subscriber
from artiq import __artiq_dir__ as artiq_dir, __version__ as artiq_version
from artiq.tools import get_user_config_dir
from artiq.gui.models import ModelSubscriber
from artiq.gui import state, log
from artiq.dashboard import (experiments, shortcuts, explorer,
moninj, datasets, schedule, applets_ccb)
moninj, datasets, schedule, applets_ccb,
waveform, interactive_args)
def get_argparser():
@ -47,6 +48,15 @@ def get_argparser():
parser.add_argument(
"-p", "--load-plugin", dest="plugin_modules", action="append",
help="Python module to load on startup")
parser.add_argument(
"--analyzer-proxy-timeout", default=5, type=float,
help="connection timeout to core analyzer proxy")
parser.add_argument(
"--analyzer-proxy-timer", default=5, type=float,
help="retry timer to core analyzer proxy")
parser.add_argument(
"--analyzer-proxy-timer-backoff", default=1.1, type=float,
help="retry timer backoff multiplier to core analyzer proxy")
common_args.verbosity_args(parser)
return parser
@ -60,7 +70,7 @@ class MainWindow(QtWidgets.QMainWindow):
self.setWindowTitle("ARTIQ Dashboard - {}".format(server))
qfm = QtGui.QFontMetrics(self.font())
self.resize(140*qfm.averageCharWidth(), 38*qfm.lineSpacing())
self.resize(140 * qfm.averageCharWidth(), 38 * qfm.lineSpacing())
self.exit_request = asyncio.Event()
@ -100,8 +110,8 @@ class MdiArea(QtWidgets.QMdiArea):
def paintEvent(self, event):
QtWidgets.QMdiArea.paintEvent(self, event)
painter = QtGui.QPainter(self.viewport())
x = (self.width() - self.pixmap.width())//2
y = (self.height() - self.pixmap.height())//2
x = (self.width() - self.pixmap.width()) // 2
y = (self.height() - self.pixmap.height()) // 2
painter.setOpacity(0.5)
painter.drawPixmap(x, y, self.pixmap)
@ -119,7 +129,7 @@ def main():
if args.db_file is None:
args.db_file = os.path.join(get_user_config_dir(),
"artiq_dashboard_{server}_{port}.pyon".format(
server=args.server.replace(":","."),
server=args.server.replace(":", "."),
port=args.port_notify))
app = QtWidgets.QApplication(["ARTIQ Dashboard"])
@ -130,10 +140,10 @@ def main():
# create connections to master
rpc_clients = dict()
for target in "schedule", "experiment_db", "dataset_db", "device_db":
for target in "schedule", "experiment_db", "dataset_db", "device_db", "interactive_arg_db":
client = AsyncioClient()
loop.run_until_complete(client.connect_rpc(
args.server, args.port_control, "master_" + target))
args.server, args.port_control, target))
atexit.register(client.close_rpc)
rpc_clients[target] = client
@ -144,6 +154,7 @@ def main():
master_management.close_rpc()
disconnect_reported = False
def report_disconnect():
nonlocal disconnect_reported
if not disconnect_reported:
@ -155,9 +166,9 @@ def main():
for notifier_name, modelf in (("explist", explorer.Model),
("explist_status", explorer.StatusUpdater),
("datasets", datasets.Model),
("schedule", schedule.Model)):
subscriber = ModelSubscriber(notifier_name, modelf,
report_disconnect)
("schedule", schedule.Model),
("interactive_args", interactive_args.Model)):
subscriber = ModelSubscriber(notifier_name, modelf, report_disconnect)
loop.run_until_complete(subscriber.connect(
args.server, args.port_notify))
atexit_register_coroutine(subscriber.close, loop=loop)
@ -215,10 +226,29 @@ def main():
smgr.register(d_applets)
broadcast_clients["ccb"].notify_cbs.append(d_applets.ccb_notify)
d_ttl_dds = moninj.MonInj(rpc_clients["schedule"])
loop.run_until_complete(d_ttl_dds.start(args.server, args.port_notify))
d_ttl_dds = moninj.MonInjDock(rpc_clients["schedule"])
atexit_register_coroutine(d_ttl_dds.stop, loop=loop)
d_waveform = waveform.WaveformDock(
args.analyzer_proxy_timeout,
args.analyzer_proxy_timer,
args.analyzer_proxy_timer_backoff
)
atexit_register_coroutine(d_waveform.stop, loop=loop)
def init_cbs(ddb):
d_ttl_dds.dm.init_ddb(ddb)
d_waveform.init_ddb(ddb)
return ddb
devices_sub = Subscriber("devices", init_cbs, [d_ttl_dds.dm.notify_ddb, d_waveform.notify_ddb])
loop.run_until_complete(devices_sub.connect(args.server, args.port_notify))
atexit_register_coroutine(devices_sub.close, loop=loop)
d_interactive_args = interactive_args.InteractiveArgsDock(
sub_clients["interactive_args"],
rpc_clients["interactive_arg_db"]
)
d_schedule = schedule.ScheduleDock(
rpc_clients["schedule"], sub_clients["schedule"])
smgr.register(d_schedule)
@ -230,9 +260,8 @@ def main():
# lay out docks
right_docks = [
d_explorer, d_shortcuts,
d_ttl_dds.ttl_dock, d_ttl_dds.dds_dock, d_ttl_dds.dac_dock,
d_datasets, d_applets
d_explorer, d_shortcuts, d_ttl_dds,
d_datasets, d_applets, d_waveform, d_interactive_args
]
main_window.addDockWidget(QtCore.Qt.RightDockWidgetArea, right_docks[0])
for d1, d2 in zip(right_docks, right_docks[1:]):
@ -249,16 +278,11 @@ def main():
smgr.start(loop=loop)
atexit_register_coroutine(smgr.stop, loop=loop)
# work around for https://github.com/m-labs/artiq/issues/1307
d_ttl_dds.ttl_dock.show()
d_ttl_dds.dds_dock.show()
# create first log dock if not already in state
d_log0 = logmgr.first_log_dock()
if d_log0 is not None:
main_window.tabifyDockWidget(d_schedule, d_log0)
if server_name is not None:
server_description = server_name + " ({})".format(args.server)
else:
@ -269,5 +293,6 @@ def main():
main_window.show()
loop.run_until_complete(main_window.exit_request.wait())
if __name__ == "__main__":
main()


@ -211,6 +211,11 @@ class PeripheralManager:
urukul_name = self.get_name("urukul")
synchronization = peripheral["synchronization"]
channel = count(0)
pll_en = peripheral["pll_en"]
clk_div = peripheral.get("clk_div")
if clk_div is None:
clk_div = 0 if pll_en else 1
self.gen("""
device_db["eeprom_{name}"] = {{
"type": "local",
@ -277,7 +282,7 @@ class PeripheralManager:
sync_device="\"ttl_{name}_sync\"".format(name=urukul_name) if synchronization else "None",
refclk=peripheral.get("refclk", self.primary_description["rtio_frequency"]),
clk_sel=peripheral["clk_sel"],
clk_div=peripheral["clk_div"])
clk_div=clk_div)
dds = peripheral["dds"]
pll_vco = peripheral.get("pll_vco")
for i in range(4):
@ -299,7 +304,7 @@ class PeripheralManager:
uchn=i,
sw=",\n \"sw_device\": \"ttl_{name}_sw{uchn}\"".format(name=urukul_name, uchn=i) if len(peripheral["ports"]) > 1 else "",
pll_vco=",\n \"pll_vco\": {}".format(pll_vco) if pll_vco is not None else "",
pll_n=peripheral.get("pll_n", 32), pll_en=peripheral["pll_en"],
pll_n=peripheral.get("pll_n", 32), pll_en=pll_en,
sync_delay_seed=",\n \"sync_delay_seed\": \"eeprom_{}:{}\"".format(urukul_name, 64 + 4*i) if synchronization else "",
io_update_delay=",\n \"io_update_delay\": \"eeprom_{}:{}\"".format(urukul_name, 64 + 4*i) if synchronization else "")
elif dds == "ad9912":
@ -320,7 +325,7 @@ class PeripheralManager:
uchn=i,
sw=",\n \"sw_device\": \"ttl_{name}_sw{uchn}\"".format(name=urukul_name, uchn=i) if len(peripheral["ports"]) > 1 else "",
pll_vco=",\n \"pll_vco\": {}".format(pll_vco) if pll_vco is not None else "",
pll_n=peripheral.get("pll_n", 8), pll_en=peripheral["pll_en"])
pll_n=peripheral.get("pll_n", 8), pll_en=pll_en)
else:
raise ValueError
return next(channel)


@ -15,7 +15,8 @@ from sipyco.asyncio_tools import atexit_register_coroutine, SignalHandler
from artiq import __version__ as artiq_version
from artiq.master.log import log_args, init_log
from artiq.master.databases import DeviceDB, DatasetDB
from artiq.master.databases import (DeviceDB, DatasetDB,
InteractiveArgDB)
from artiq.master.scheduler import Scheduler
from artiq.master.rid_counter import RIDCounter
from artiq.master.experiments import (FilesystemBackend, GitBackend,
@ -60,7 +61,7 @@ def get_argparser():
help="friendly name, displayed in dashboards "
"to identify master instead of server address")
parser.add_argument("--log-submissions", default=None,
help="set the filename to create the experiment subimission")
help="log experiment submissions to specified file")
return parser
@ -81,8 +82,7 @@ def main():
bind, args.port_broadcast))
atexit_register_coroutine(server_broadcast.stop, loop=loop)
log_forwarder.callback = (lambda msg:
server_broadcast.broadcast("log", msg))
log_forwarder.callback = lambda msg: server_broadcast.broadcast("log", msg)
def ccb_issue(service, *args, **kwargs):
msg = {
"service": service,
@ -96,6 +96,7 @@ def main():
atexit.register(dataset_db.close_db)
dataset_db.start(loop=loop)
atexit_register_coroutine(dataset_db.stop, loop=loop)
interactive_arg_db = InteractiveArgDB()
worker_handlers = dict()
if args.git:
@ -106,15 +107,21 @@ def main():
repo_backend, worker_handlers, args.experiment_subdir)
atexit.register(experiment_db.close)
scheduler = Scheduler(RIDCounter(), worker_handlers, experiment_db, args.log_submissions)
scheduler = Scheduler(RIDCounter(), worker_handlers, experiment_db,
args.log_submissions)
scheduler.start(loop=loop)
atexit_register_coroutine(scheduler.stop, loop=loop)
# Python doesn't allow writing attributes to bound methods.
def get_interactive_arguments(*args, **kwargs):
return interactive_arg_db.get(*args, **kwargs)
get_interactive_arguments._worker_pass_rid = True
worker_handlers.update({
"get_device_db": device_db.get_device_db,
"get_device": device_db.get,
"get_dataset": dataset_db.get,
"update_dataset": dataset_db.update,
"get_interactive_arguments": get_interactive_arguments,
"scheduler_submit": scheduler.submit,
"scheduler_delete": scheduler.delete,
"scheduler_request_termination": scheduler.request_termination,
@ -133,10 +140,11 @@ def main():
server_control = RPCServer({
"master_management": master_management,
"master_device_db": device_db,
"master_dataset_db": dataset_db,
"master_schedule": scheduler,
"master_experiment_db": experiment_db,
"device_db": device_db,
"dataset_db": dataset_db,
"interactive_arg_db": interactive_arg_db,
"schedule": scheduler,
"experiment_db": experiment_db,
}, allow_parallel=True)
loop.run_until_complete(server_control.start(
bind, args.port_control))
@ -146,8 +154,9 @@ def main():
"schedule": scheduler.notifier,
"devices": device_db.data,
"datasets": dataset_db.data,
"interactive_args": interactive_arg_db.pending,
"explist": experiment_db.explist,
"explist_status": experiment_db.status
"explist_status": experiment_db.status,
})
loop.run_until_complete(server_notify.start(
bind, args.port_notify))


@ -13,7 +13,7 @@ import h5py
from llvmlite import binding as llvm
from sipyco import common_args
from sipyco import common_args, pyon
from artiq import __version__ as artiq_version
from artiq.language.environment import EnvExperiment, ProcessArgumentManager
@ -166,9 +166,30 @@ def get_argparser(with_file=True):
return parser
class ArgumentManager(ProcessArgumentManager):
def get_interactive(self, interactive_arglist, title):
print(title)
result = dict()
for key, processor, group, tooltip in interactive_arglist:
success = False
while not success:
user_input = input("{}:{} (group={}, tooltip={}): ".format(
key, type(processor).__name__, group, tooltip))
try:
user_input_deser = pyon.decode(user_input)
value = processor.process(user_input_deser)
except:
logger.error("failed to process user input, retrying",
exc_info=True)
else:
success = True
result[key] = value
return result
def _build_experiment(device_mgr, dataset_mgr, args):
arguments = parse_arguments(args.arguments)
argument_mgr = ProcessArgumentManager(arguments)
argument_mgr = ArgumentManager(arguments)
managers = (device_mgr, dataset_mgr, argument_mgr, {})
if hasattr(args, "file"):
is_tar = tarfile.is_tarfile(args.file)


@ -78,6 +78,7 @@ class DRTIOSatellite(Module):
self.reset = CSRStorage(reset=1)
self.reset_phy = CSRStorage(reset=1)
self.tsc_loaded = CSR()
self.async_messages_ready = CSR()
# master interface in the sys domain
self.cri = cri.Interface()
self.async_errors = Record(async_errors_layout)
@ -129,6 +130,9 @@ class DRTIOSatellite(Module):
link_layer_sync, interface=self.cri)
self.comb += self.rt_packet.reset.eq(self.cd_rio.rst)
self.sync += If(self.async_messages_ready.re, self.rt_packet.async_msg_stb.eq(1))
self.comb += self.async_messages_ready.w.eq(self.rt_packet.async_msg_ack)
self.comb += [
tsc.load.eq(self.rt_packet.tsc_load),
tsc.load_value.eq(self.rt_packet.tsc_load_value)
@ -136,14 +140,14 @@ class DRTIOSatellite(Module):
self.sync += [
If(self.tsc_loaded.re, self.tsc_loaded.w.eq(0)),
If(self.rt_packet.tsc_load, self.tsc_loaded.w.eq(1))
If(self.rt_packet.tsc_load, self.tsc_loaded.w.eq(1)),
]
self.submodules.rt_errors = rt_errors_satellite.RTErrorsSatellite(
self.rt_packet, tsc, self.async_errors)
def get_csrs(self):
return ([self.reset, self.reset_phy, self.tsc_loaded] +
return ([self.reset, self.reset_phy, self.tsc_loaded, self.async_messages_ready] +
self.link_layer.get_csrs() + self.link_stats.get_csrs() +
self.rt_errors.get_csrs())


@ -17,6 +17,7 @@ class _CSRs(AutoCSR):
self.set_time = CSR()
self.underflow_margin = CSRStorage(16, reset=300)
self.async_messages_ready = CSR()
self.force_destination = CSRStorage()
self.destination = CSRStorage(8)
@ -60,6 +61,11 @@ class RTController(Module):
If(self.csrs.set_time.re, rt_packet.set_time_stb.eq(1))
]
self.sync += [
If(rt_packet.async_messages_ready, self.csrs.async_messages_ready.w.eq(1)),
If(self.csrs.async_messages_ready.re, self.csrs.async_messages_ready.w.eq(0))
]
# chan_sel forcing
chan_sel = Signal(24)
self.comb += chan_sel.eq(Mux(self.csrs.force_destination.storage,


@ -14,6 +14,7 @@ class RTController(Module, AutoCSR):
self.command_missed_cmd = CSRStatus(2)
self.command_missed_chan_sel = CSRStatus(24)
self.buffer_space_timeout_dest = CSRStatus(8)
self.async_messages_ready = CSR()
self.sync += rt_packet.reset.eq(self.reset.storage)
@ -23,6 +24,12 @@ class RTController(Module, AutoCSR):
]
self.comb += self.set_time.w.eq(rt_packet.set_time_stb)
self.sync += [
If(rt_packet.async_messages_ready, self.async_messages_ready.w.eq(1)),
If(self.async_messages_ready.re, self.async_messages_ready.w.eq(0))
]
errors = [
(rt_packet.err_unknown_packet_type, "rtio_rx", None, None),
(rt_packet.err_packet_truncated, "rtio_rx", None, None),


@ -61,6 +61,9 @@ class RTPacketMaster(Module):
# a set_time request pending
self.tsc_value = Signal(64)
# async aux messages interface, only received
self.async_messages_ready = Signal()
# rx errors
self.err_unknown_packet_type = Signal()
self.err_packet_truncated = Signal()
@ -283,12 +286,16 @@ class RTPacketMaster(Module):
echo_received_now = Signal()
self.sync.rtio_rx += self.echo_received_now.eq(echo_received_now)
async_messages_ready = Signal()
self.sync.rtio_rx += self.async_messages_ready.eq(async_messages_ready)
rx_fsm.act("INPUT",
If(rx_dp.frame_r,
rx_dp.packet_buffer_load.eq(1),
If(rx_dp.packet_last,
Case(rx_dp.packet_type, {
rx_plm.types["echo_reply"]: echo_received_now.eq(1),
rx_plm.types["async_messages_ready"]: async_messages_ready.eq(1),
rx_plm.types["buffer_space_reply"]: NextState("BUFFER_SPACE"),
rx_plm.types["read_reply"]: NextState("READ_REPLY"),
rx_plm.types["read_reply_noevent"]: NextState("READ_REPLY_NOEVENT"),


@ -19,6 +19,7 @@ class RTPacketRepeater(Module):
# in rtio_rx domain
self.err_unknown_packet_type = Signal()
self.err_packet_truncated = Signal()
self.async_messages_ready = Signal()
# in rtio domain
self.err_command_missed = Signal()
@ -304,6 +305,7 @@ class RTPacketRepeater(Module):
rx_dp.packet_buffer_load.eq(1),
If(rx_dp.packet_last,
Case(rx_dp.packet_type, {
rx_plm.types["async_messages_ready"]: self.async_messages_ready.eq(1),
rx_plm.types["buffer_space_reply"]: NextState("BUFFER_SPACE"),
rx_plm.types["read_reply"]: NextState("READ_REPLY"),
rx_plm.types["read_reply_noevent"]: NextState("READ_REPLY_NOEVENT"),

View File

@ -19,6 +19,9 @@ class RTPacketSatellite(Module):
self.tsc_load = Signal()
self.tsc_load_value = Signal(64)
self.async_msg_stb = Signal()
self.async_msg_ack = Signal()
if interface is None:
interface = cri.Interface()
self.cri = interface
@ -78,6 +81,8 @@ class RTPacketSatellite(Module):
)
]
self.sync += If(self.async_msg_ack, self.async_msg_stb.eq(0))
# RX FSM
cri_read = Signal()
cri_buffer_space = Signal()
@ -197,6 +202,7 @@ class RTPacketSatellite(Module):
tx_fsm.act("IDLE",
If(echo_req, NextState("ECHO")),
If(self.async_msg_stb, NextState("ASYNC_MESSAGES_READY")),
If(buffer_space_req, NextState("BUFFER_SPACE")),
If(read_request_pending & ~self.cri.i_status[2],
NextState("READ"),
@ -210,6 +216,12 @@ class RTPacketSatellite(Module):
If(tx_dp.packet_last, NextState("IDLE"))
)
tx_fsm.act("ASYNC_MESSAGES_READY",
self.async_msg_ack.eq(1),
tx_dp.send("async_messages_ready"),
If(tx_dp.packet_last, NextState("IDLE"))
)
tx_fsm.act("BUFFER_SPACE",
buffer_space_ack.eq(1),
tx_dp.send("buffer_space_reply", space=buffer_space),

View File

@ -69,6 +69,7 @@ def get_s2m_layouts(alignment):
plm.add_type("read_reply", ("timestamp", 64), ("data", 32))
plm.add_type("read_reply_noevent", ("overflow", 1)) # overflow=0→timeout
plm.add_type("async_messages_ready")
return plm

View File

@ -7,6 +7,7 @@ import os
import subprocess
from functools import partial
from itertools import count
from types import SimpleNamespace
from PyQt5 import QtCore, QtGui, QtWidgets
@ -14,43 +15,16 @@ from sipyco.pipe_ipc import AsyncioParentComm
from sipyco.logging_tools import LogParser
from sipyco import pyon
from artiq.gui.entries import procdesc_to_entry
from artiq.gui.tools import (QDockWidgetCloseDetect, LayoutWidget,
WheelFilter)
from artiq.gui.entries import procdesc_to_entry, EntryTreeWidget
from artiq.gui.tools import QDockWidgetCloseDetect, LayoutWidget
logger = logging.getLogger(__name__)
class EntryArea(QtWidgets.QTreeWidget):
class EntryArea(EntryTreeWidget):
def __init__(self):
QtWidgets.QTreeWidget.__init__(self)
self.setColumnCount(3)
self.header().setStretchLastSection(False)
if hasattr(self.header(), "setSectionResizeMode"):
set_resize_mode = self.header().setSectionResizeMode
else:
set_resize_mode = self.header().setResizeMode
set_resize_mode(0, QtWidgets.QHeaderView.ResizeToContents)
set_resize_mode(1, QtWidgets.QHeaderView.Stretch)
self.header().setVisible(False)
self.setSelectionMode(self.NoSelection)
self.setHorizontalScrollMode(self.ScrollPerPixel)
self.setVerticalScrollMode(self.ScrollPerPixel)
self.setStyleSheet("QTreeWidget {background: " +
self.palette().midlight().color().name() + " ;}")
self.viewport().installEventFilter(WheelFilter(self.viewport(), True))
self._groups = dict()
self._arg_to_widgets = dict()
self._arguments = dict()
self.gradient = QtGui.QLinearGradient(
0, 0, 0, QtGui.QFontMetrics(self.font()).lineSpacing()*2.5)
self.gradient.setColorAt(0, self.palette().base().color())
self.gradient.setColorAt(1, self.palette().midlight().color())
EntryTreeWidget.__init__(self)
reset_all_button = QtWidgets.QPushButton("Restore defaults")
reset_all_button.setToolTip("Reset all to default values")
reset_all_button.setIcon(
@ -62,125 +36,57 @@ class EntryArea(QtWidgets.QTreeWidget):
buttons.layout.setColumnStretch(1, 0)
buttons.layout.setColumnStretch(2, 1)
buttons.addWidget(reset_all_button, 0, 1)
self.bottom_item = QtWidgets.QTreeWidgetItem()
self.addTopLevelItem(self.bottom_item)
self.setItemWidget(self.bottom_item, 1, buttons)
self.bottom_item.setHidden(True)
self._processors = dict()
def setattr_argument(self, name, proc, group=None, tooltip=None):
def setattr_argument(self, key, processor, group=None, tooltip=None):
argument = dict()
desc = proc.describe()
desc = processor.describe()
self._processors[key] = processor
argument["desc"] = desc
argument["group"] = group
argument["tooltip"] = tooltip
self._arguments[name] = argument
widgets = dict()
self._arg_to_widgets[name] = widgets
entry_class = procdesc_to_entry(argument["desc"])
argument["state"] = entry_class.default_state(argument["desc"])
entry = entry_class(argument)
widget_item = QtWidgets.QTreeWidgetItem([name])
if argument["tooltip"]:
widget_item.setToolTip(0, argument["tooltip"])
widgets["entry"] = entry
widgets["widget_item"] = widget_item
self.set_argument(key, argument)
if len(self._arguments) > 1:
self.bottom_item.setHidden(False)
def __getattr__(self, key):
return self.get_value(key)
for col in range(3):
widget_item.setBackground(col, self.gradient)
font = widget_item.font(0)
font.setBold(True)
widget_item.setFont(0, font)
def get_value(self, key):
entry = self._arg_to_widgets[key]["entry"]
argument = self._arguments[key]
processor = self._processors[key]
return processor.process(entry.state_to_value(argument["state"]))
if argument["group"] is None:
self.insertTopLevelItem(self.indexFromItem(self.bottom_item).row(), widget_item)
else:
self._get_group(argument["group"]).addChild(widget_item)
self.bottom_item.setHidden(False)
fix_layout = LayoutWidget()
widgets["fix_layout"] = fix_layout
fix_layout.addWidget(entry)
self.setItemWidget(widget_item, 1, fix_layout)
reset_value = QtWidgets.QToolButton()
reset_value.setToolTip("Reset to default value")
reset_value.setIcon(
QtWidgets.QApplication.style().standardIcon(
QtWidgets.QStyle.SP_BrowserReload))
reset_value.clicked.connect(partial(self.reset_value, name))
tool_buttons = LayoutWidget()
tool_buttons.addWidget(reset_value, 0)
self.setItemWidget(widget_item, 2, tool_buttons)
def _get_group(self, name):
if name in self._groups:
return self._groups[name]
group = QtWidgets.QTreeWidgetItem([name])
for col in range(3):
group.setBackground(col, self.palette().mid())
group.setForeground(col, self.palette().brightText())
font = group.font(col)
font.setBold(True)
group.setFont(col, font)
self.insertTopLevelItem(self.indexFromItem(self.bottom_item).row(), group)
self._groups[name] = group
return group
def __getattr__(self, name):
return self.get_value(name)
def get_value(self, name):
entry = self._arg_to_widgets[name]["entry"]
argument = self._arguments[name]
return entry.state_to_value(argument["state"])
def set_value(self, name, value):
ty = self._arguments[name]["desc"]["ty"]
def set_value(self, key, value):
ty = self._arguments[key]["desc"]["ty"]
if ty == "Scannable":
desc = value.describe()
self._arguments[name]["state"][desc["ty"]] = desc
self._arguments[name]["state"]["selected"] = desc["ty"]
self._arguments[key]["state"][desc["ty"]] = desc
self._arguments[key]["state"]["selected"] = desc["ty"]
else:
self._arguments[name]["state"] = value
self.update_value(name)
self._arguments[key]["state"] = value
self.update_value(key)
def get_values(self):
d = dict()
for name in self._arguments.keys():
d[name] = self.get_value(name)
d = SimpleNamespace()
for key in self._arguments.keys():
setattr(d, key, self.get_value(key))
return d
def set_values(self, values):
for name, value in values.items():
self.set_value(name, value)
for key, value in values.items():
self.set_value(key, value)
def update_value(self, name):
widgets = self._arg_to_widgets[name]
argument = self._arguments[name]
def update_value(self, key):
argument = self._arguments[key]
self.update_argument(key, argument)
# Qt needs a setItemWidget() to handle layout correctly,
# simply replacing the entry inside the LayoutWidget
# results in a bug.
widgets["entry"].deleteLater()
widgets["entry"] = procdesc_to_entry(argument["desc"])(argument)
widgets["fix_layout"].deleteLater()
widgets["fix_layout"] = LayoutWidget()
widgets["fix_layout"].addWidget(widgets["entry"])
self.setItemWidget(widgets["widget_item"], 1, widgets["fix_layout"])
self.updateGeometries()
def reset_value(self, name):
procdesc = self._arguments[name]["desc"]
self._arguments[name]["state"] = procdesc_to_entry(procdesc).default_state(procdesc)
self.update_value(name)
def reset_value(self, key):
self.reset_entry(key)
def reset_all(self):
for name in self._arguments.keys():
self.reset_value(name)
for key in self._arguments.keys():
self.reset_entry(key)
class AppletIPCServer(AsyncioParentComm):
@ -255,7 +161,7 @@ class AppletIPCServer(AsyncioParentComm):
elif action == "update_dataset":
await self.dataset_ctl.update(obj["mod"])
elif action == "set_argument_value":
self.expmgr.set_argument_value(obj["expurl"], obj["name"], obj["value"])
self.expmgr.set_argument_value(obj["expurl"], obj["key"], obj["value"])
else:
raise ValueError("unknown action in applet message")
except:

100
artiq/gui/dndwidgets.py Normal file
View File

@ -0,0 +1,100 @@
from PyQt5 import QtCore, QtWidgets
class VDragDropSplitter(QtWidgets.QSplitter):
dropped = QtCore.pyqtSignal(int, int)
def __init__(self, parent):
QtWidgets.QSplitter.__init__(self, parent=parent)
self.setAcceptDrops(True)
self.setContentsMargins(0, 0, 0, 0)
self.setOrientation(QtCore.Qt.Vertical)
self.setChildrenCollapsible(False)
def resetSizes(self):
self.setSizes(self.count() * [1])
def dragEnterEvent(self, e):
e.accept()
def dragLeaveEvent(self, e):
self.setRubberBand(-1)
e.accept()
def dragMoveEvent(self, e):
pos = e.pos()
src = e.source()
src_i = self.indexOf(src)
self.setRubberBand(self.height())
# case 0: smaller than source widget
if pos.y() < src.y():
for n in range(src_i):
w = self.widget(n)
if pos.y() < w.y() + w.size().height():
self.setRubberBand(w.y())
break
# case 2: greater than source widget
elif pos.y() > src.y() + src.size().height():
for n in range(src_i + 1, self.count()):
w = self.widget(n)
if pos.y() < w.y():
self.setRubberBand(w.y())
break
else:
self.setRubberBand(-1)
e.accept()
def dropEvent(self, e):
self.setRubberBand(-1)
pos = e.pos()
src = e.source()
src_i = self.indexOf(src)
for n in range(self.count()):
w = self.widget(n)
if pos.y() < w.y() + w.size().height():
self.dropped.emit(src_i, n)
break
e.accept()
# Scroll area with auto-scroll on vertical drag
class VDragScrollArea(QtWidgets.QScrollArea):
def __init__(self, parent):
QtWidgets.QScrollArea.__init__(self, parent)
self.installEventFilter(self)
self._margin = 40
self._timer = QtCore.QTimer(self)
self._timer.setInterval(20)
self._timer.timeout.connect(self._on_auto_scroll)
self._direction = 0
self._speed = 10
def setAutoScrollMargin(self, margin):
self._margin = margin
def setAutoScrollSpeed(self, speed):
self._speed = speed
def eventFilter(self, obj, e):
if e.type() == QtCore.QEvent.DragMove:
val = self.verticalScrollBar().value()
height = self.viewport().height()
y = e.pos().y()
self._direction = 0
if y < val + self._margin:
self._direction = -1
elif y > height + val - self._margin:
self._direction = 1
if not self._timer.isActive():
self._timer.start()
elif e.type() in (QtCore.QEvent.Drop, QtCore.QEvent.DragLeave):
self._timer.stop()
return False
def _on_auto_scroll(self):
val = self.verticalScrollBar().value()
min_ = self.verticalScrollBar().minimum()
max_ = self.verticalScrollBar().maximum()
dy = self._direction * self._speed
new_val = min(max_, max(min_, val + dy))
self.verticalScrollBar().setValue(new_val)
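A minimal usage sketch (not part of this changeset; the class and method names other than ``VDragScrollArea`` and ``VDragDropSplitter`` are illustrative) showing how the two widgets might be combined into a reorderable, auto-scrolling stack: ::
    from PyQt5 import QtWidgets
    from artiq.gui.dndwidgets import VDragDropSplitter, VDragScrollArea
    class ReorderableStack(QtWidgets.QWidget):
        def __init__(self):
            QtWidgets.QWidget.__init__(self)
            layout = QtWidgets.QVBoxLayout(self)
            # the scroll area auto-scrolls while a drag hovers near its edges
            scroll_area = VDragScrollArea(self)
            self.splitter = VDragDropSplitter(parent=scroll_area)
            scroll_area.setWidget(self.splitter)
            scroll_area.setWidgetResizable(True)
            layout.addWidget(scroll_area)
            # reorder the children when a drag is dropped
            self.splitter.dropped.connect(self._move_widget)
        def _move_widget(self, src, dst):
            self.splitter.insertWidget(dst, self.splitter.widget(src))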

View File

@ -1,9 +1,10 @@
import logging
from collections import OrderedDict
from functools import partial
from PyQt5 import QtCore, QtGui, QtWidgets
from artiq.gui.tools import LayoutWidget, disable_scroll_wheel
from artiq.gui.tools import LayoutWidget, disable_scroll_wheel, WheelFilter
from artiq.gui.scanwidget import ScanWidget
from artiq.gui.scientific_spinbox import ScientificSpinBox
@ -11,6 +12,157 @@ from artiq.gui.scientific_spinbox import ScientificSpinBox
logger = logging.getLogger(__name__)
class EntryTreeWidget(QtWidgets.QTreeWidget):
quickStyleClicked = QtCore.pyqtSignal()
def __init__(self):
QtWidgets.QTreeWidget.__init__(self)
self.setColumnCount(3)
self.header().setStretchLastSection(False)
if hasattr(self.header(), "setSectionResizeMode"):
set_resize_mode = self.header().setSectionResizeMode
else:
set_resize_mode = self.header().setResizeMode
set_resize_mode(0, QtWidgets.QHeaderView.ResizeToContents)
set_resize_mode(1, QtWidgets.QHeaderView.Stretch)
set_resize_mode(2, QtWidgets.QHeaderView.ResizeToContents)
self.header().setVisible(False)
self.setSelectionMode(self.NoSelection)
self.setHorizontalScrollMode(self.ScrollPerPixel)
self.setVerticalScrollMode(self.ScrollPerPixel)
self.setStyleSheet("QTreeWidget {background: " +
self.palette().midlight().color().name() + " ;}")
self.viewport().installEventFilter(WheelFilter(self.viewport(), True))
self._groups = dict()
self._arg_to_widgets = dict()
self._arguments = dict()
self.gradient = QtGui.QLinearGradient(
0, 0, 0, QtGui.QFontMetrics(self.font()).lineSpacing() * 2.5)
self.gradient.setColorAt(0, self.palette().base().color())
self.gradient.setColorAt(1, self.palette().midlight().color())
self.bottom_item = QtWidgets.QTreeWidgetItem()
self.addTopLevelItem(self.bottom_item)
def set_argument(self, key, argument):
self._arguments[key] = argument
widgets = dict()
self._arg_to_widgets[key] = widgets
entry_class = procdesc_to_entry(argument["desc"])
argument["state"] = entry_class.default_state(argument["desc"])
entry = entry_class(argument)
if argument["desc"].get("quickstyle"):
entry.quickStyleClicked.connect(self.quickStyleClicked)
widget_item = QtWidgets.QTreeWidgetItem([key])
if argument["tooltip"]:
widget_item.setToolTip(0, argument["tooltip"])
widgets["entry"] = entry
widgets["widget_item"] = widget_item
for col in range(3):
widget_item.setBackground(col, self.gradient)
font = widget_item.font(0)
font.setBold(True)
widget_item.setFont(0, font)
if argument["group"] is None:
self.insertTopLevelItem(self.indexFromItem(self.bottom_item).row(), widget_item)
else:
self._get_group(argument["group"]).addChild(widget_item)
fix_layout = LayoutWidget()
widgets["fix_layout"] = fix_layout
fix_layout.addWidget(entry)
self.setItemWidget(widget_item, 1, fix_layout)
reset_entry = QtWidgets.QToolButton()
reset_entry.setToolTip("Reset to default value")
reset_entry.setIcon(
QtWidgets.QApplication.style().standardIcon(
QtWidgets.QStyle.SP_BrowserReload))
reset_entry.clicked.connect(partial(self.reset_entry, key))
disable_other_scans = QtWidgets.QToolButton()
widgets["disable_other_scans"] = disable_other_scans
disable_other_scans.setIcon(
QtWidgets.QApplication.style().standardIcon(
QtWidgets.QStyle.SP_DialogResetButton))
disable_other_scans.setToolTip("Disable other scans")
disable_other_scans.clicked.connect(
partial(self._disable_other_scans, key))
if not isinstance(entry, ScanEntry):
disable_other_scans.setVisible(False)
tool_buttons = LayoutWidget()
tool_buttons.layout.setRowStretch(0, 1)
tool_buttons.layout.setRowStretch(3, 1)
tool_buttons.addWidget(reset_entry, 1)
tool_buttons.addWidget(disable_other_scans, 2)
self.setItemWidget(widget_item, 2, tool_buttons)
def _get_group(self, key):
if key in self._groups:
return self._groups[key]
group = QtWidgets.QTreeWidgetItem([key])
for col in range(3):
group.setBackground(col, self.palette().mid())
group.setForeground(col, self.palette().brightText())
font = group.font(col)
font.setBold(True)
group.setFont(col, font)
self.insertTopLevelItem(self.indexFromItem(self.bottom_item).row(), group)
self._groups[key] = group
return group
def _disable_other_scans(self, current_key):
for key, widgets in self._arg_to_widgets.items():
if (key != current_key and isinstance(widgets["entry"], ScanEntry)):
widgets["entry"].disable()
def update_argument(self, key, argument):
widgets = self._arg_to_widgets[key]
# Qt needs a setItemWidget() to handle layout correctly,
# simply replacing the entry inside the LayoutWidget
# results in a bug.
widgets["entry"].deleteLater()
widgets["entry"] = procdesc_to_entry(argument["desc"])(argument)
widgets["disable_other_scans"].setVisible(
isinstance(widgets["entry"], ScanEntry))
widgets["fix_layout"].deleteLater()
widgets["fix_layout"] = LayoutWidget()
widgets["fix_layout"].addWidget(widgets["entry"])
self.setItemWidget(widgets["widget_item"], 1, widgets["fix_layout"])
self.updateGeometries()
def reset_entry(self, key):
procdesc = self._arguments[key]["desc"]
self._arguments[key]["state"] = procdesc_to_entry(procdesc).default_state(procdesc)
self.update_argument(key, self._arguments[key])
def save_state(self):
expanded = []
for k, v in self._groups.items():
if v.isExpanded():
expanded.append(k)
return {
"expanded": expanded,
"scroll": self.verticalScrollBar().value()
}
def restore_state(self, state):
for e in state["expanded"]:
try:
self._groups[e].setExpanded(True)
except KeyError:
pass
self.verticalScrollBar().setValue(state["scroll"])
class StringEntry(QtWidgets.QLineEdit):
def __init__(self, argument):
QtWidgets.QLineEdit.__init__(self)
@ -45,17 +197,38 @@ class BooleanEntry(QtWidgets.QCheckBox):
return procdesc.get("default", False)
class EnumerationEntry(QtWidgets.QComboBox):
class EnumerationEntry(QtWidgets.QWidget):
quickStyleClicked = QtCore.pyqtSignal()
def __init__(self, argument):
QtWidgets.QComboBox.__init__(self)
disable_scroll_wheel(self)
choices = argument["desc"]["choices"]
self.addItems(choices)
QtWidgets.QWidget.__init__(self)
layout = QtWidgets.QHBoxLayout()
self.setLayout(layout)
procdesc = argument["desc"]
choices = procdesc["choices"]
if procdesc["quickstyle"]:
self.btn_group = QtWidgets.QButtonGroup()
for i, choice in enumerate(choices):
button = QtWidgets.QPushButton(choice)
self.btn_group.addButton(button)
self.btn_group.setId(button, i)
layout.addWidget(button)
def submit(index):
argument["state"] = choices[index]
self.quickStyleClicked.emit()
self.btn_group.idClicked.connect(submit)
else:
self.combo_box = QtWidgets.QComboBox()
disable_scroll_wheel(self.combo_box)
self.combo_box.addItems(choices)
idx = choices.index(argument["state"])
self.setCurrentIndex(idx)
self.combo_box.setCurrentIndex(idx)
layout.addWidget(self.combo_box)
def update(index):
argument["state"] = choices[index]
self.currentIndexChanged.connect(update)
self.combo_box.currentIndexChanged.connect(update)
@staticmethod
def state_to_value(state):

View File

@ -69,6 +69,23 @@ async def get_open_file_name(parent, caption, dir, filter):
return await fut
async def get_save_file_name(parent, caption, dir, filter, suffix=None):
"""like QtWidgets.QFileDialog.getSaveFileName(), but a coroutine"""
dialog = QtWidgets.QFileDialog(parent, caption, dir, filter)
dialog.setFileMode(dialog.AnyFile)
dialog.setAcceptMode(dialog.AcceptSave)
if suffix is not None:
dialog.setDefaultSuffix(suffix)
fut = asyncio.Future()
def on_accept():
fut.set_result(dialog.selectedFiles()[0])
dialog.accepted.connect(on_accept)
dialog.rejected.connect(fut.cancel)
dialog.open()
return await fut
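A call-site sketch for the new coroutine (assumed, not from this changeset; the caption, directory and filter strings are illustrative), from within a coroutine running on the Qt event loop: ::
    filename = await get_save_file_name(
        self, "Save as HDF5", ".", "HDF5 files (*.h5)", suffix="h5")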
# Based on:
# http://stackoverflow.com/questions/250890/using-qsortfilterproxymodel-with-a-tree-model
class QRecursiveFilterProxyModel(QtCore.QSortFilterProxyModel):

View File

@ -72,8 +72,8 @@ def subkernel(arg=None, destination=0, flags={}):
Subkernels behave similarly to kernels, with a few key differences:
- they are started from main kernels,
- they do not support RPCs, or running subsequent subkernels on other devices,
- but they can call other kernels or subkernels with the same destination.
- they do not support RPCs,
- but they can call other kernels or subkernels.
Subkernels can accept arguments and return values. However, they must be fully
annotated with ARTIQ types.

View File

@ -1,5 +1,7 @@
from collections import OrderedDict
from inspect import isclass
from contextlib import contextmanager
from types import SimpleNamespace
from sipyco import pyon
@ -10,7 +12,8 @@ from artiq.language.core import rpc
__all__ = ["NoDefault", "DefaultMissing",
"PYONValue", "BooleanValue", "EnumerationValue",
"NumberValue", "StringValue",
"HasEnvironment", "Experiment", "EnvExperiment"]
"HasEnvironment", "Experiment", "EnvExperiment",
"CancelledArgsError"]
class NoDefault:
@ -24,6 +27,12 @@ class DefaultMissing(Exception):
pass
class CancelledArgsError(Exception):
"""Raised by the ``interactive`` context manager when an interactive
arguments request is cancelled."""
pass
class _SimpleArgProcessor:
def __init__(self, default=NoDefault):
# If default is a list, it means multiple defaults are specified, with
@ -80,9 +89,12 @@ class EnumerationValue(_SimpleArgProcessor):
:param choices: A list of strings representing the possible values of the
argument.
:param quickstyle: Enables the choices to be displayed in the GUI as a
list of buttons that submit the experiment when clicked.
"""
def __init__(self, choices, default=NoDefault):
def __init__(self, choices, default=NoDefault, quickstyle=False):
self.choices = choices
self.quickstyle = quickstyle
super().__init__(default)
def process(self, x):
@ -93,6 +105,7 @@ class EnumerationValue(_SimpleArgProcessor):
def describe(self):
d = _SimpleArgProcessor.describe(self)
d["choices"] = self.choices
d["quickstyle"] = self.quickstyle
return d
@ -212,6 +225,9 @@ class TraceArgumentManager:
self.requested_args[key] = processor, group, tooltip
return None
def get_interactive(self, interactive_arglist, title):
raise NotImplementedError
class ProcessArgumentManager:
def __init__(self, unprocessed_arguments):
@ -233,6 +249,10 @@ class ProcessArgumentManager:
raise AttributeError("Supplied argument(s) not queried in experiment: " +
", ".join(unprocessed))
def get_interactive(self, interactive_arglist, title):
raise NotImplementedError
class HasEnvironment:
"""Provides methods to manage the environment of an experiment (arguments,
devices, datasets)."""
@ -322,6 +342,33 @@ class HasEnvironment:
kernel_invariants = getattr(self, "kernel_invariants", set())
self.kernel_invariants = kernel_invariants | {key}
@contextmanager
def interactive(self, title=""):
"""Request arguments from the user interactively.
This context manager returns a namespace object on which the method
`setattr_argument` should be called, with the usual semantics.
When the context manager terminates, the experiment is blocked
and the user is presented with the requested argument widgets.
After the user enters values, the experiment is resumed and
the namespace contains the values of the arguments.
If the interactive arguments request is cancelled, raises
``CancelledArgsError``."""
interactive_arglist = []
namespace = SimpleNamespace()
def setattr_argument(key, processor=None, group=None, tooltip=None):
interactive_arglist.append((key, processor, group, tooltip))
namespace.setattr_argument = setattr_argument
yield namespace
del namespace.setattr_argument
argdict = self.__argument_mgr.get_interactive(interactive_arglist, title)
if argdict is None:
raise CancelledArgsError
for key, value in argdict.items():
setattr(namespace, key, value)
def get_device_db(self):
"""Returns the full contents of the device database."""
return self.__device_mgr.get_device_db()
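A hedged sketch (not part of this changeset; the experiment name, argument keys and choices are illustrative) of how the new ``interactive()`` context manager and the ``quickstyle`` option of ``EnumerationValue`` might be used together: ::
    from artiq.experiment import *
    class InteractiveDemo(EnvExperiment):
        def build(self):
            pass
        def run(self):
            try:
                with self.interactive(title="Next step") as interactive:
                    # quickstyle renders the choices as buttons; clicking one
                    # submits the interactive arguments immediately
                    interactive.setattr_argument("operation", EnumerationValue(
                        ["calibrate", "measure", "abort"],
                        default="measure", quickstyle=True))
                    interactive.setattr_argument("comment", StringValue(""))
            except CancelledArgsError:
                print("interactive arguments request cancelled")
                return
            print(interactive.operation, interactive.comment)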

View File

@ -2,7 +2,8 @@ import asyncio
import lmdb
from sipyco.sync_struct import Notifier, process_mod, ModAction, update_from_dict
from sipyco.sync_struct import (Notifier, process_mod, ModAction,
update_from_dict)
from sipyco import pyon
from sipyco.asyncio_tools import TaskObject
@ -60,7 +61,8 @@ class DatasetDB(TaskObject):
def save(self):
with self.lmdb.begin(write=True) as txn:
for key in self.pending_keys:
if key not in self.data.raw_view or not self.data.raw_view[key][0]:
if (key not in self.data.raw_view
or not self.data.raw_view[key][0]):
txn.delete(key.encode())
else:
value_and_metadata = (self.data.raw_view[key][1],
@ -87,7 +89,8 @@ class DatasetDB(TaskObject):
if mod["path"]:
key = mod["path"][0]
else:
assert(mod["action"] == ModAction.setitem.value or mod["action"] == ModAction.delitem.value)
assert (mod["action"] == ModAction.setitem.value
or mod["action"] == ModAction.delitem.value)
key = mod["key"]
self.pending_keys.add(key)
process_mod(self.data, mod)
@ -111,3 +114,34 @@ class DatasetDB(TaskObject):
del self.data[key]
self.pending_keys.add(key)
#
class InteractiveArgDB:
def __init__(self):
self.pending = Notifier(dict())
self.futures = dict()
async def get(self, rid, arglist_desc, title):
self.pending[rid] = {"title": title, "arglist_desc": arglist_desc}
self.futures[rid] = asyncio.get_running_loop().create_future()
try:
value = await self.futures[rid]
finally:
del self.pending[rid]
del self.futures[rid]
return value
def supply(self, rid, values):
# quick sanity checks
if rid not in self.futures or self.futures[rid].done():
raise ValueError("no experiment with this RID is "
"waiting for interactive arguments")
if {i[0] for i in self.pending.raw_view[rid]["arglist_desc"]} != set(values.keys()):
raise ValueError("supplied and requested keys do not match")
self.futures[rid].set_result(values)
def cancel(self, rid):
if rid not in self.futures or self.futures[rid].done():
raise ValueError("no experiment with this RID is "
"waiting for interactive arguments")
self.futures[rid].set_result(None)
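A hedged sketch of how ``InteractiveArgDB`` ties the two sides together (assuming it lives in ``artiq.master.databases`` alongside ``DatasetDB``; the RID, argument key and descriptor are illustrative): ::
    import asyncio
    from artiq.master.databases import InteractiveArgDB
    async def demo():
        db = InteractiveArgDB()
        # experiment side: block until values are supplied (None on cancel)
        getter = asyncio.ensure_future(
            db.get(rid=0, arglist_desc=[("gain", {}, None, None)], title="Gain"))
        await asyncio.sleep(0)     # let get() register the pending request
        db.supply(0, {"gain": 2})  # GUI side: provide the values
        assert await getter == {"gain": 2}
    asyncio.run(demo())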

View File

@ -111,7 +111,7 @@ class ExperimentDB:
try:
if new_cur_rev is None:
new_cur_rev = self.repo_backend.get_head_rev()
wd, _ = self.repo_backend.request_rev(new_cur_rev)
wd, _, _ = self.repo_backend.request_rev(new_cur_rev)
self.repo_backend.release_rev(self.cur_rev)
self.cur_rev = new_cur_rev
self.status["cur_rev"] = new_cur_rev
@ -132,7 +132,7 @@ class ExperimentDB:
if use_repository:
if revision is None:
revision = self.cur_rev
wd, _ = self.repo_backend.request_rev(revision)
wd, _, revision = self.repo_backend.request_rev(revision)
filename = os.path.join(wd, filename)
worker = Worker(self.worker_handlers)
try:
@ -169,7 +169,7 @@ class FilesystemBackend:
return "N/A"
def request_rev(self, rev):
return self.root, None
return self.root, None, "N/A"
def release_rev(self, rev):
pass
@ -200,14 +200,26 @@ class GitBackend:
def get_head_rev(self):
return str(self.git.head.target)
def _get_pinned_rev(self, rev):
"""
Resolve a git reference (e.g. "HEAD", "master", "abcdef123456...") into
a git hash
"""
commit, _ = self.git.resolve_refish(rev)
logger.debug('Resolved git ref "%s" into "%s"', rev, commit.hex)
return commit.hex
def request_rev(self, rev):
rev = self._get_pinned_rev(rev)
if rev in self.checkouts:
co = self.checkouts[rev]
co.ref_count += 1
else:
co = _GitCheckout(self.git, rev)
self.checkouts[rev] = co
return co.path, co.message
return co.path, co.message, rev
def release_rev(self, rev):
co = self.checkouts[rev]

View File

@ -132,15 +132,23 @@ class RunPool:
writer.writerow([rid, start_time, expid["file"]])
def submit(self, expid, priority, due_date, flush, pipeline_name):
"""
Submits an experiment to be run by this pool.
If expid has the attribute `repo_rev`, treat it as a git revision or
reference and resolve it into a unique git hash before submission.
"""
# mutates expid to insert head repository revision if None and
# replaces relative path with the absolute one.
# called through scheduler.
rid = self.ridc.get()
if "repo_rev" in expid:
if expid["repo_rev"] is None:
expid["repo_rev"] = self.experiment_db.cur_rev
wd, repo_msg = self.experiment_db.repo_backend.request_rev(
expid["repo_rev"])
repo_rev_or_ref = expid["repo_rev"] or self.experiment_db.cur_rev
wd, repo_msg, repo_rev = self.experiment_db.repo_backend.request_rev(repo_rev_or_ref)
# Mutate expid's repo_rev to that returned from request_rev, in case
# a branch was passed instead of a hash
expid["repo_rev"] = repo_rev
else:
if "file" in expid:
expid["file"] = os.path.abspath(expid["file"])
@ -231,7 +239,7 @@ class PrepareStage(TaskObject):
try:
await run.build()
await run.prepare()
except:
except Exception:
logger.error("got worker exception in prepare stage, "
"deleting RID %d", run.rid)
log_worker_exception()
@ -281,7 +289,7 @@ class RunStage(TaskObject):
else:
run.status = RunStatus.running
completed = await run.run()
except:
except Exception:
logger.error("got worker exception in run stage, "
"deleting RID %d", run.rid)
log_worker_exception()
@ -318,7 +326,7 @@ class AnalyzeStage(TaskObject):
run.status = RunStatus.analyzing
try:
await run.analyze()
except:
except Exception:
logger.error("got worker exception in analyze stage of RID %d.",
run.rid)
log_worker_exception()
@ -506,4 +514,3 @@ class Scheduler:
if run.termination_requested:
return True
return False

View File

@ -226,9 +226,15 @@ class Worker:
else:
func = self.handlers[action]
try:
data = func(*obj["args"], **obj["kwargs"])
if getattr(func, "_worker_pass_rid", False):
args = [self.rid] + list(obj["args"])
else:
args = obj["args"]
data = func(*args, **obj["kwargs"])
if asyncio.iscoroutine(data):
data = await data
reply = {"status": "ok", "data": data}
except:
except Exception:
reply = {
"status": "failed",
"exception": current_exc_packed()

View File

@ -28,10 +28,10 @@ from artiq.master.worker_db import DeviceManager, DatasetManager, DummyDevice
from artiq.language.environment import (
is_public_experiment, TraceArgumentManager, ProcessArgumentManager
)
from artiq.language.core import set_watchdog_factory, TerminationRequested
from artiq.language.core import host_only, set_watchdog_factory, TerminationRequested
from artiq.language.types import TBool
from artiq.compiler import import_cache
from artiq.coredevice.core import CompileError, host_only, _render_diagnostic
from artiq.coredevice.core import CompileError, _render_diagnostic
from artiq import __version__ as artiq_version
@ -200,9 +200,7 @@ def examine(device_mgr, dataset_mgr, file):
name = name[:-1]
argument_mgr = TraceArgumentManager()
scheduler_defaults = {}
cls = exp_class( # noqa: F841 (fill argument_mgr)
(device_mgr, dataset_mgr, argument_mgr, scheduler_defaults)
)
exp_class((device_mgr, dataset_mgr, argument_mgr, scheduler_defaults))
arginfo = OrderedDict(
(k, (proc.describe(), group, tooltip))
for k, (proc, group, tooltip) in argument_mgr.requested_args.items()
@ -217,6 +215,19 @@ def examine(device_mgr, dataset_mgr, file):
del sys.modules[key]
class ArgumentManager(ProcessArgumentManager):
_get_interactive = make_parent_action("get_interactive_arguments")
def get_interactive(self, interactive_arglist, title):
arglist_desc = [(k, p.describe(), g, t)
for k, p, g, t in interactive_arglist]
arguments = ArgumentManager._get_interactive(arglist_desc, title)
if arguments is not None:
for key, processor, _, _ in interactive_arglist:
arguments[key] = processor.process(arguments[key])
return arguments
def setup_diagnostics(experiment_file, repository_path):
def render_diagnostic(self, diagnostic):
message = "While compiling {}\n".format(experiment_file) + \
@ -329,7 +340,7 @@ def main():
time.strftime("%H", start_local_time))
os.makedirs(dirname, exist_ok=True)
os.chdir(dirname)
argument_mgr = ProcessArgumentManager(expid["arguments"])
argument_mgr = ArgumentManager(expid["arguments"])
exp_inst = exp((device_mgr, dataset_mgr, argument_mgr, {}))
argument_mgr.check_unprocessed_arguments()
put_completed()

View File

@ -0,0 +1,22 @@
# RUN: env ARTIQ_DUMP_LLVM=%t %python -m artiq.compiler.testbench.embedding +compile %s
# RUN: OutputCheck %s --file-to-check=%t.ll
from artiq.language.core import *
from artiq.language.types import *
@kernel
def entrypoint():
# CHECK: call void @subkernel_load_run\(i32 1, i8 1, i1 true\), !dbg !.
message_pass()
# CHECK-NOT: call void @subkernel_send_message\(i32 ., i1 false, i8 1, i8 1, .*\), !dbg !.
# CHECK: call i8 @subkernel_await_message\(i32 2, i64 -1, { i8\*, i32 }\* nonnull .*, i8 1, i8 1\), !dbg !.
subkernel_recv("message", TInt32)
# CHECK-L: declare void @subkernel_load_run(i32, i8, i1) local_unnamed_addr
# CHECK-NOT-L: declare void @subkernel_send_message(i32, i1, i8, i8, { i8*, i32 }*, i8**) local_unnamed_addr
# CHECK-L: declare i8 @subkernel_await_message(i32, i64, { i8*, i32 }*, i8, i8) local_unnamed_addr
# CHECK-NOT-L: declare void @subkernel_await_finish(i32, i64) local_unnamed_addr
@subkernel(destination=1)
def message_pass() -> TNone:
subkernel_send(0, "message", 15)

View File

@ -0,0 +1,20 @@
# RUN: env ARTIQ_DUMP_LLVM=%t %python -m artiq.compiler.testbench.embedding +compile %s
# RUN: OutputCheck %s --file-to-check=%t.ll
from artiq.language.core import *
from artiq.language.types import *
@kernel
def entrypoint():
# CHECK: call void @subkernel_load_run\(i32 1, i8 1, i1 true\), !dbg !.
message_pass()
# CHECK: call void @subkernel_send_message\(i32 2, i1 false, i8 1, i8 1, .*\), !dbg !.
# CHECK-NOT: call i8 @subkernel_await_message\(i32 1, i64 10000, { i8\*, i32 }\* nonnull .*, i8 1, i8 1\), !dbg !.
subkernel_send(1, "message", 15)
# CHECK-L: declare void @subkernel_load_run(i32, i8, i1) local_unnamed_addr
# CHECK-L: declare void @subkernel_send_message(i32, i1, i8, i8, { i8*, i32 }*, i8**) local_unnamed_addr
@subkernel(destination=1)
def message_pass() -> TNone:
subkernel_recv("message", TInt32)

View File

@ -6,13 +6,13 @@ from artiq.language.types import *
@kernel
def entrypoint():
# CHECK: call void @subkernel_load_run\(i32 1, i1 true\), !dbg !.
# CHECK: call void @subkernel_load_run\(i32 1, i8 1, i1 true\), !dbg !.
# CHECK-NOT: call void @subkernel_send_message\(.*\), !dbg !.
no_arg()
# CHECK-L: declare void @subkernel_load_run(i32, i1) local_unnamed_addr
# CHECK-NOT-L: declare void @subkernel_send_message(i32, { i8*, i32 }*, i8**) local_unnamed_addr
# CHECK-L: declare void @subkernel_load_run(i32, i8, i1) local_unnamed_addr
# CHECK-NOT-L: declare void @subkernel_send_message(i32, i1, i8, { i8*, i32 }*, i8**) local_unnamed_addr
@subkernel(destination=1)
def no_arg() -> TStr:
pass

View File

@ -6,15 +6,15 @@ from artiq.language.types import *
@kernel
def entrypoint():
# CHECK: call void @subkernel_load_run\(i32 1, i1 true\), !dbg !.
# CHECK: call void @subkernel_load_run\(i32 1, i8 1, i1 true\), !dbg !.
# CHECK-NOT: call void @subkernel_send_message\(.*\), !dbg !.
returning()
# CHECK: call i8 @subkernel_await_message\(i32 1, i64 10000, { i8\*, i32 }\* nonnull .*, i8 1, i8 1\), !dbg !.
# CHECK: call void @subkernel_await_finish\(i32 1, i64 10000\), !dbg !.
# CHECK: call i8 @subkernel_await_message\(i32 1, i64 -1, { i8\*, i32 }\* nonnull .*, i8 1, i8 1\), !dbg !.
# CHECK: call void @subkernel_await_finish\(i32 1, i64 -1\), !dbg !.
subkernel_await(returning)
# CHECK-L: declare void @subkernel_load_run(i32, i1) local_unnamed_addr
# CHECK-NOT-L: declare void @subkernel_send_message(i32, i8, { i8*, i32 }*, i8**) local_unnamed_addr
# CHECK-L: declare void @subkernel_load_run(i32, i8, i1) local_unnamed_addr
# CHECK-NOT-L: declare void @subkernel_send_message(i32, i1, i8, i8, { i8*, i32 }*, i8**) local_unnamed_addr
# CHECK-L: declare i8 @subkernel_await_message(i32, i64, { i8*, i32 }*, i8, i8) local_unnamed_addr
# CHECK-L: declare void @subkernel_await_finish(i32, i64) local_unnamed_addr
@subkernel(destination=1)

View File

@ -6,15 +6,15 @@ from artiq.language.types import *
@kernel
def entrypoint():
# CHECK: call void @subkernel_load_run\(i32 1, i1 true\), !dbg !.
# CHECK: call void @subkernel_load_run\(i32 1, i8 1, i1 true\), !dbg !.
# CHECK-NOT: call void @subkernel_send_message\(.*\), !dbg !.
returning_none()
# CHECK: call void @subkernel_await_finish\(i32 1, i64 10000\), !dbg !.
# CHECK-NOT: call i8 @subkernel_await_message\(i32 1, i64 10000\, .*\), !dbg !.
# CHECK: call void @subkernel_await_finish\(i32 1, i64 -1\), !dbg !.
# CHECK-NOT: call i8 @subkernel_await_message\(i32 1, i64 -1\, .*\), !dbg !.
subkernel_await(returning_none)
# CHECK-L: declare void @subkernel_load_run(i32, i1) local_unnamed_addr
# CHECK-NOT-L: declare void @subkernel_send_message(i32, { i8*, i32 }*, i8**) local_unnamed_addr
# CHECK-L: declare void @subkernel_load_run(i32, i8, i1) local_unnamed_addr
# CHECK-NOT-L: declare void @subkernel_send_message(i32, i1, i8, { i8*, i32 }*, i8**) local_unnamed_addr
# CHECK-L: declare void @subkernel_await_finish(i32, i64) local_unnamed_addr
# CHECK-NOT-L: declare i8 @subkernel_await_message(i32, i64, { i8*, i32 }*, i8, i8) local_unnamed_addr
@subkernel(destination=1)

View File

@ -11,7 +11,7 @@ class A:
@kernel
def kernel_entrypoint(self):
# CHECK: call void @subkernel_load_run\(i32 1, i1 true\), !dbg !.
# CHECK: call void @subkernel_load_run\(i32 1, i8 1, i1 true\), !dbg !.
# CHECK-NOT: call void @subkernel_send_message\(.*\), !dbg !.
self.sk()
@ -21,5 +21,5 @@ a = A()
def entrypoint():
a.kernel_entrypoint()
# CHECK-L: declare void @subkernel_load_run(i32, i1) local_unnamed_addr
# CHECK-NOT-L: declare void @subkernel_send_message(i32, i8, { i8*, i32 }*, i8**) local_unnamed_addr
# CHECK-L: declare void @subkernel_load_run(i32, i8, i1) local_unnamed_addr
# CHECK-NOT-L: declare void @subkernel_send_message(i32, i1, i8, i8, { i8*, i32 }*, i8**) local_unnamed_addr

View File

@ -11,8 +11,8 @@ class A:
@kernel
def kernel_entrypoint(self):
# CHECK: call void @subkernel_load_run\(i32 1, i1 true\), !dbg !.
# CHECK: call void @subkernel_send_message\(i32 1, i8 1, .*\), !dbg !.
# CHECK: call void @subkernel_load_run\(i32 1, i8 1, i1 true\), !dbg !.
# CHECK: call void @subkernel_send_message\(i32 1, i1 false, i8 1, i8 1, .*\), !dbg !.
self.sk(1)
a = A()
@ -21,5 +21,5 @@ a = A()
def entrypoint():
a.kernel_entrypoint()
# CHECK-L: declare void @subkernel_load_run(i32, i1) local_unnamed_addr
# CHECK-L: declare void @subkernel_send_message(i32, i8, { i8*, i32 }*, i8**) local_unnamed_addr
# CHECK-L: declare void @subkernel_load_run(i32, i8, i1) local_unnamed_addr
# CHECK-L: declare void @subkernel_send_message(i32, i1, i8, i8, { i8*, i32 }*, i8**) local_unnamed_addr

View File

@ -6,13 +6,13 @@ from artiq.language.types import *
@kernel
def entrypoint():
# CHECK: call void @subkernel_load_run\(i32 1, i1 true\), !dbg !.
# CHECK: call void @subkernel_send_message\(i32 ., i8 1, .*\), !dbg !.
# CHECK: call void @subkernel_load_run\(i32 1, i8 1, i1 true\), !dbg !.
# CHECK: call void @subkernel_send_message\(i32 ., i1 false, i8 1, i8 1, .*\), !dbg !.
accept_arg(1)
# CHECK-L: declare void @subkernel_load_run(i32, i1) local_unnamed_addr
# CHECK-L: declare void @subkernel_send_message(i32, i8, { i8*, i32 }*, i8**) local_unnamed_addr
# CHECK-L: declare void @subkernel_load_run(i32, i8, i1) local_unnamed_addr
# CHECK-L: declare void @subkernel_send_message(i32, i1, i8, i8, { i8*, i32 }*, i8**) local_unnamed_addr
@subkernel(destination=1)
def accept_arg(arg: TInt32) -> TNone:
pass

View File

@ -6,16 +6,16 @@ from artiq.language.types import *
@kernel
def entrypoint():
# CHECK: call void @subkernel_load_run\(i32 1, i1 true\), !dbg !.
# CHECK: call void @subkernel_send_message\(i32 ., i8 1, .*\), !dbg !.
# CHECK: call void @subkernel_load_run\(i32 1, i8 1, i1 true\), !dbg !.
# CHECK: call void @subkernel_send_message\(i32 ., i1 false, i8 1, i8 1, .*\), !dbg !.
accept_arg(1)
# CHECK: call void @subkernel_load_run\(i32 1, i1 true\), !dbg !.
# CHECK: call void @subkernel_send_message\(i32 ., i8 2, .*\), !dbg !.
# CHECK: call void @subkernel_load_run\(i32 1, i8 1, i1 true\), !dbg !.
# CHECK: call void @subkernel_send_message\(i32 ., i1 false, i8 1, i8 2, .*\), !dbg !.
accept_arg(1, 2)
# CHECK-L: declare void @subkernel_load_run(i32, i1) local_unnamed_addr
# CHECK-L: declare void @subkernel_send_message(i32, i8, { i8*, i32 }*, i8**) local_unnamed_addr
# CHECK-L: declare void @subkernel_load_run(i32, i8, i1) local_unnamed_addr
# CHECK-L: declare void @subkernel_send_message(i32, i1, i8, i8, { i8*, i32 }*, i8**) local_unnamed_addr
@subkernel(destination=1)
def accept_arg(arg_a, arg_b=5) -> TNone:
pass

View File

@ -9,7 +9,7 @@ class TestFrontends(unittest.TestCase):
"""Test --help as a simple smoke test against catastrophic breakage."""
commands = {
"aqctl": [
"corelog", "moninj_proxy"
"corelog", "moninj_proxy", "coreanalyzer_proxy"
],
"artiq": [
"client", "compile", "coreanalyzer", "coremgmt",

View File

@ -34,7 +34,9 @@ mock_modules = ["artiq.gui.waitingspinnerwidget",
"artiq.gui.models",
"artiq.compiler.module",
"artiq.compiler.embedding",
"qasync", "pyqtgraph", "matplotlib",
"artiq.dashboard.waveform",
"artiq.dashboard.interactive_args",
"qasync", "pyqtgraph", "matplotlib", "lmdb",
"numpy", "dateutil", "dateutil.parser", "prettytable", "PyQt5",
"h5py", "serial", "scipy", "scipy.interpolate",
"llvmlite", "Levenshtein", "pythonparser",
@ -95,7 +97,7 @@ master_doc = 'index'
# General information about the project.
project = 'ARTIQ'
copyright = '2014-2023, M-Labs Limited'
copyright = '2014-2024, M-Labs Limited'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the

View File

@ -275,7 +275,7 @@ Subkernels refer to kernels running on a satellite device. This allows you to of
Subkernels behave for the most part as regular kernels; they accept arguments and can return values. However, there are a few caveats:
- they do not support RPCs or calling subsequent subkernels on other devices,
- they do not support RPCs,
- they do not support DRTIO,
- their return value must be fully annotated with an ARTIQ type,
- their arguments should be annotated, and only basic ARTIQ types are supported,
@ -304,13 +304,13 @@ For example, a subkernel performing integer addition: ::
result = subkernel_await(subkernel_add)
assert result == 4
Sometimes the subkernel execution may take more time - and the await has a default timeout of 10000 milliseconds (10 seconds). It can be adjusted, as ``subkernel_await()`` accepts an optional timeout argument.
Sometimes subkernel execution may take more time. By default, the await function waits forever. However, if a timeout is needed, it can be set through the optional argument of ``subkernel_await()``. The value is interpreted in milliseconds; if it is negative, the timeout is disabled.
Subkernels are compiled after the main kernel, and then immediately uploaded to satellites. When called, master instructs the appropriate satellite to load the subkernel into their kernel core and to run it. If the subkernel is complex, and its binary relatively big, the delay between the call and actually running the subkernel may be substantial; if that delay has to be minimized, ``subkernel_preload(function)`` should be used before the call.
While ``self`` is accepted as an argument for subkernels, it is embedded into the compiled data. Any changes made by the main kernel or other subkernels will not be available.
Subkernels can call other kernels and subkernels, if they're within the same destination. For a more complex example: ::
Subkernels can call other kernels and subkernels. For a more complex example: ::
from artiq.experiment import *
@ -347,3 +347,34 @@ In general, subkernels do not have to be awaited, but awaiting is required to re
.. note::
When a subkernel is running, regardless of the devices it uses, RTIO devices on that satellite are not available to the master. Control is returned to the master after the subkernel finishes - to be sure that you can use a device, await the subkernel before performing any RTIO operations on the affected satellite.
Message passing
^^^^^^^^^^^^^^^
Besides arguments and return values, subkernels can also pass messages to each other or to the master with the built-in ``subkernel_send()`` and ``subkernel_recv()`` functions. This can be used for communication between subkernels, or for passing additional or partially computed data. Consider the following example: ::
from artiq.experiment import *
@subkernel(destination=1)
def simple_message() -> TInt32:
data = subkernel_recv("message", TInt32)
return data + 20
class MessagePassing(EnvExperiment):
def build(self):
self.setattr_device("core")
@kernel
def run(self):
simple_message()
subkernel_send(1, "message", 150)
result = subkernel_await(simple_message)
assert result == 170
The ``subkernel_send(destination, name, value)`` function requires three arguments: destination, name of the message that will be linked with the ``subkernel_recv()``, and the passed value.
The ``subkernel_recv(name, type, [timeout])`` function requires two arguments: message name (matching the name provided in ``subkernel_send``) and expected type. Optionally, it accepts a third argument - timeout for the operation in milliseconds. If the value is negative, timeout is disabled. The default value is no timeout.
The "name" argument in both ``send`` and ``recv`` functions acts as a link, and must match exactly between the two for a successful message transaction. The type of the value sent by ``subkernel_send`` is checked against the type declared in ``subkernel_recv`` with the same name, to avoid misinterpretation of the data. The compiler also checks if all subkernel message names have both a sending and receiving functions to help with typos. However, it cannot help if wrong names are used - the receiver will wait only for a matching message for the duration of the timeout.
A message can be received only while a subkernel is running, and it is put into a buffer to be taken when required - thus the sending order cannot cause a deadlock. However, a subkernel may time out or wait forever if the destination or names do not match (e.g. a message sent to the wrong destination, or under a different name than expected, even if the types match).
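For instance, a hedged sketch of supplying timeouts to both functions (the message name, destination and timeout values are illustrative, assuming the same imports as in the example above): ::
    @subkernel(destination=1)
    def wait_for_trigger() -> TInt32:
        # wait at most 5 seconds for the message before raising
        return subkernel_recv("trigger", TInt32, 5000)
    class TimeoutExample(EnvExperiment):
        def build(self):
            self.setattr_device("core")
        @kernel
        def run(self):
            wait_for_trigger()
            subkernel_send(1, "trigger", 1)
            # wait at most 5 seconds for the subkernel to return
            result = subkernel_await(wait_for_trigger, 5000)
            assert result == 1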

View File

@ -27,4 +27,4 @@ Website: https://m-labs.hk/artiq
`Cite ARTIQ <http://dx.doi.org/10.5281/zenodo.51303>`_ as ``Bourdeauducq, Sébastien et al. (2016). ARTIQ 1.0. Zenodo. 10.5281/zenodo.51303``.
Copyright (C) 2014-2023 M-Labs Limited. Licensed under GNU LGPL version 3+.
Copyright (C) 2014-2024 M-Labs Limited. Licensed under GNU LGPL version 3+.

View File

@ -157,11 +157,10 @@ Embedded applets should use `AppletRequestIPC` while standalone applets use `App
Applet entry area
*****************
Extensions are provided to enable the use of argument widgets in applets through the `EntryArea` class.
Argument widgets can be used in applets through the `EntryArea` class.
Below is a simple example code snippet using the `EntryArea` class: ::
# Create the experiment area
entry_area = EntryArea()
# Create a new widget

30
flake.lock generated
View File

@ -11,11 +11,11 @@
]
},
"locked": {
"lastModified": 1701573753,
"narHash": "sha256-vhEtXjb9AM6/HnsgfVmhJQeqQ9JqysUm7iWNzTIbexs=",
"lastModified": 1707216368,
"narHash": "sha256-ZXoqzG2QsVsybALLYXs473avXcyKSZNh2kIgcPo60XQ=",
"owner": "m-labs",
"repo": "artiq-comtools",
"rev": "199bdabf4de49cb7ada8a4ac7133008e0f8434b7",
"rev": "e5d0204490bccc07ef9141b0d7c405ab01cb8273",
"type": "github"
},
"original": {
@ -45,11 +45,11 @@
"mozilla-overlay": {
"flake": false,
"locked": {
"lastModified": 1695805681,
"narHash": "sha256-1ElPLD8eFfnuIk0G52HGGpRtQZ4QPCjChRlEOfkZ5ro=",
"lastModified": 1704373101,
"narHash": "sha256-+gi59LRWRQmwROrmE1E2b3mtocwueCQqZ60CwLG+gbg=",
"owner": "mozilla",
"repo": "nixpkgs-mozilla",
"rev": "6eabade97bc28d707a8b9d82ad13ef143836736e",
"rev": "9b11a87c0cc54e308fa83aac5b4ee1816d5418a2",
"type": "github"
},
"original": {
@ -60,11 +60,11 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1702346276,
"narHash": "sha256-eAQgwIWApFQ40ipeOjVSoK4TEHVd6nbSd9fApiHIw5A=",
"lastModified": 1711668574,
"narHash": "sha256-u1dfs0ASQIEr1icTVrsKwg2xToIpn7ZXxW3RHfHxshg=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "cf28ee258fd5f9a52de6b9865cdb93a1f96d09b7",
"rev": "219951b495fc2eac67b1456824cc1ec1fd2ee659",
"type": "github"
},
"original": {
@ -92,11 +92,11 @@
]
},
"locked": {
"lastModified": 1701572254,
"narHash": "sha256-ixq8dlpyOytDr+d/OmW8v1Ioy9V2G2ibOlNj8GFDSq4=",
"lastModified": 1708937641,
"narHash": "sha256-Hkb9VYFzFgkYxfbh4kYcDSn7DbMUYehoQDeTALrxo2Q=",
"owner": "m-labs",
"repo": "sipyco",
"rev": "cceac0df537887135f99aa6b1bdd82853f16b4d6",
"rev": "4a28b311ce0069454b4e8fe1e6049db11b9f1296",
"type": "github"
},
"original": {
@ -108,11 +108,11 @@
"src-migen": {
"flake": false,
"locked": {
"lastModified": 1699335478,
"narHash": "sha256-BsubN4Mfdj02QPK6ZCrl+YOaSg7DaLQdSCVP49ztWik=",
"lastModified": 1702942348,
"narHash": "sha256-gKIfHZxsv+jcgDFRW9mPqmwqbZXuRvXefkZcSFjOGHw=",
"owner": "m-labs",
"repo": "migen",
"rev": "fd0bf5855a1367eab14b0d6f7f8266178e25d78e",
"rev": "50934ad10a87ade47219b796535978b9bdf24023",
"type": "github"
},
"original": {

View File

@ -92,22 +92,6 @@
disabledTestPaths = [ "tests/test_qeventloop.py" ];
};
outputcheck = pkgs.python3Packages.buildPythonApplication rec {
pname = "outputcheck";
version = "0.4.2";
src = pkgs.fetchFromGitHub {
owner = "stp";
repo = "OutputCheck";
rev = "e0f533d3c5af2949349856c711bf4bca50022b48";
sha256 = "1y27vz6jq6sywas07kz3v01sqjd0sga9yv9w2cksqac3v7wmf2a0";
};
prePatch = "echo ${version} > RELEASE-VERSION";
postPatch = ''
substituteInPlace OutputCheck/Driver.py \
--replace "argparse.FileType('rU')" "argparse.FileType('r')"
'';
};
libartiq-support = pkgs.stdenv.mkDerivation {
name = "libartiq-support";
src = self;
@ -187,7 +171,7 @@
# FIXME: automatically propagate lld_14 llvm_14 dependencies
# cacert is required in the check stage only, as certificates are to be
# obtained from system elsewhere
nativeCheckInputs = [ pkgs.lld_14 pkgs.llvm_14 libartiq-support pkgs.lit outputcheck pkgs.cacert ];
nativeCheckInputs = with pkgs; [ lld_14 llvm_14 lit outputcheck cacert ] ++ [ libartiq-support ];
checkPhase = ''
python -m unittest discover -v artiq.test
@ -351,17 +335,6 @@
paths = [ openocd-fixed bscan_spi_bitstreams-pkg ];
};
sphinxcontrib-wavedrom = pkgs.python3Packages.buildPythonPackage rec {
pname = "sphinxcontrib-wavedrom";
version = "3.0.4";
format = "pyproject";
src = pkgs.python3Packages.fetchPypi {
inherit pname version;
sha256 = "sha256-0zTHVBr9kXwMEo4VRTFsxdX2HI31DxdHfLUHCQmw1Ko=";
};
nativeBuildInputs = [ pkgs.python3Packages.setuptools-scm ];
propagatedBuildInputs = (with pkgs.python3Packages; [ wavedrom sphinx xcffib cairosvg ]);
};
latex-artiq-manual = pkgs.texlive.combine {
inherit (pkgs.texlive)
scheme-basic latexmk cmap collection-fontsrecommended fncychap
@ -395,14 +368,14 @@
target = "efc";
variant = "shuttler";
};
inherit sphinxcontrib-wavedrom latex-artiq-manual;
inherit latex-artiq-manual;
artiq-manual-html = pkgs.stdenvNoCC.mkDerivation rec {
name = "artiq-manual-html-${version}";
version = artiqVersion;
src = self;
buildInputs = [
pkgs.python3Packages.sphinx pkgs.python3Packages.sphinx_rtd_theme
pkgs.python3Packages.sphinx-argparse sphinxcontrib-wavedrom
buildInputs = with pkgs.python3Packages; [
sphinx sphinx_rtd_theme
sphinx-argparse sphinxcontrib-wavedrom
];
buildPhase = ''
export VERSIONEER_OVERRIDE=${artiqVersion}
@ -420,11 +393,10 @@
name = "artiq-manual-pdf-${version}";
version = artiqVersion;
src = self;
buildInputs = [
pkgs.python3Packages.sphinx pkgs.python3Packages.sphinx_rtd_theme
pkgs.python3Packages.sphinx-argparse sphinxcontrib-wavedrom
latex-artiq-manual
];
buildInputs = with pkgs.python3Packages; [
sphinx sphinx_rtd_theme
sphinx-argparse sphinxcontrib-wavedrom
] ++ [ latex-artiq-manual ];
buildPhase = ''
export VERSIONEER_OVERRIDE=${artiq.version}
export SOURCE_DATE_EPOCH=${builtins.toString self.sourceInfo.lastModified}
@ -440,7 +412,7 @@
};
};
inherit makeArtiqBoardPackage;
inherit makeArtiqBoardPackage openocd-bscanspi-f;
defaultPackage.x86_64-linux = pkgs.python3.withPackages(ps: [ packages.x86_64-linux.artiq ]);
@ -461,14 +433,14 @@
artiq-frontend-dev-wrappers
# To manually run compiler tests:
pkgs.lit
outputcheck
pkgs.outputcheck
libartiq-support
# use the vivado-env command to enter a FHS shell that lets you run the Vivado installer
packages.x86_64-linux.vivadoEnv
packages.x86_64-linux.vivado
packages.x86_64-linux.openocd-bscanspi
pkgs.python3Packages.sphinx pkgs.python3Packages.sphinx_rtd_theme
pkgs.python3Packages.sphinx-argparse sphinxcontrib-wavedrom latex-artiq-manual
pkgs.python3Packages.sphinx-argparse pkgs.python3Packages.sphinxcontrib-wavedrom latex-artiq-manual
];
shellHook = ''
export LIBARTIQ_SUPPORT=`libartiq-support`