mirror of https://github.com/m-labs/artiq.git
Merge branch 'master' into nac3
commit 511f125c08
@@ -27,6 +27,7 @@ Highlights:

 * Full Python 3.10 support.
 * Distributed DMA is now supported, allowing DMA to be run directly on satellites for corresponding
   RTIO events, increasing bandwidth in scenarios with heavy satellite usage.
+* API extensions have been implemented, enabling applets to directly modify datasets.
 * Persistent datasets are now stored in a LMDB database for improved performance. PYON databases can
   be converted with the script below.

@@ -39,9 +40,34 @@ Highlights:
     new = lmdb.open("dataset_db.mdb", subdir=False, map_size=2**30)
     with new.begin(write=True) as txn:
         for key, value in old.items():
-            txn.put(key.encode(), pyon.encode(value).encode())
+            txn.put(key.encode(), pyon.encode((value, {})).encode())
     new.close()
 
+Breaking changes:
+
+* ``SimpleApplet`` now calls widget constructors with an additional ``ctl`` parameter for control
+  operations, which includes dataset operations. It can be ignored if not needed. For an example usage,
+  refer to the ``big_number.py`` applet.
+* ``SimpleApplet`` and ``TitleApplet`` now call ``data_changed`` with additional parameters. Wrapped widgets
+  should refactor the function signature as seen below:
+
+::
+
+    # SimpleApplet
+    def data_changed(self, value, metadata, persist, mods)
+    # SimpleApplet (old version)
+    def data_changed(self, data, mods)
+    # TitleApplet
+    def data_changed(self, value, metadata, persist, mods, title)
+    # TitleApplet (old version)
+    def data_changed(self, data, mods, title)
+
+Old syntax should be replaced with the form shown on the right:
+
+::
+
+    data[key][0] ==> persist[key]
+    data[key][1] ==> value[key]
+    data[key][2] ==> metadata[key]
+
 
 
 ARTIQ-7
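Note: the hunk above starts mid-script, so the lines that open the old PYON database are not shown in the diff. A complete conversion sketch, assuming the default file names and the sipyco ``pyon.load_file`` helper, would look roughly like this:

::

    import lmdb
    from sipyco import pyon

    # Load the old PYON dataset database and rewrite it as LMDB.
    old = pyon.load_file("dataset_db.pyon")
    new = lmdb.open("dataset_db.mdb", subdir=False, map_size=2**30)
    with new.begin(write=True) as txn:
        for key, value in old.items():
            # Entries are now stored as (value, metadata) pairs; old entries carry no metadata.
            txn.put(key.encode(), pyon.encode((value, {})).encode())
    new.close()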
@@ -1,22 +1,72 @@
 #!/usr/bin/env python3
 
-from PyQt5 import QtWidgets
+from PyQt5 import QtWidgets, QtCore, QtGui
 
 from artiq.applets.simple import SimpleApplet
 
 
-class NumberWidget(QtWidgets.QLCDNumber):
-    def __init__(self, args):
-        QtWidgets.QLCDNumber.__init__(self)
-        self.setDigitCount(args.digit_count)
-        self.dataset_name = args.dataset
+class QResponsiveLCDNumber(QtWidgets.QLCDNumber):
+    doubleClicked = QtCore.pyqtSignal()
 
-    def data_changed(self, data, mods):
+    def mouseDoubleClickEvent(self, event):
+        self.doubleClicked.emit()
+
+
+class QCancellableLineEdit(QtWidgets.QLineEdit):
+    editCancelled = QtCore.pyqtSignal()
+
+    def keyPressEvent(self, event):
+        if event.key() == QtCore.Qt.Key_Escape:
+            self.editCancelled.emit()
+        else:
+            super().keyPressEvent(event)
+
+
+class NumberWidget(QtWidgets.QStackedWidget):
+    def __init__(self, args, ctl):
+        QtWidgets.QStackedWidget.__init__(self)
+        self.dataset_name = args.dataset
+        self.ctl = ctl
+
+        self.lcd_widget = QResponsiveLCDNumber()
+        self.lcd_widget.setDigitCount(args.digit_count)
+        self.lcd_widget.doubleClicked.connect(self.start_edit)
+        self.addWidget(self.lcd_widget)
+
+        self.edit_widget = QCancellableLineEdit()
+        self.edit_widget.setValidator(QtGui.QDoubleValidator())
+        self.edit_widget.setAlignment(QtCore.Qt.AlignRight)
+        self.edit_widget.editCancelled.connect(self.cancel_edit)
+        self.edit_widget.returnPressed.connect(self.confirm_edit)
+        self.addWidget(self.edit_widget)
+
+        font = QtGui.QFont()
+        font.setPointSize(60)
+        self.edit_widget.setFont(font)
+
+        self.setCurrentWidget(self.lcd_widget)
+
+    def start_edit(self):
+        # QLCDNumber value property contains the value of zero
+        # if the displayed value is not a number.
+        self.edit_widget.setText(str(self.lcd_widget.value()))
+        self.edit_widget.selectAll()
+        self.edit_widget.setFocus()
+        self.setCurrentWidget(self.edit_widget)
+
+    def confirm_edit(self):
+        value = float(self.edit_widget.text())
+        self.ctl.set_dataset(self.dataset_name, value)
+        self.setCurrentWidget(self.lcd_widget)
+
+    def cancel_edit(self):
+        self.setCurrentWidget(self.lcd_widget)
+
+    def data_changed(self, value, metadata, persist, mods):
         try:
-            n = float(data[self.dataset_name][1])
+            n = float(value[self.dataset_name])
         except (KeyError, ValueError, TypeError):
             n = "---"
-        self.display(n)
+        self.lcd_widget.display(n)
 
 
 def main():
@@ -7,13 +7,13 @@ from artiq.applets.simple import SimpleApplet
 
 
 class Image(pyqtgraph.ImageView):
-    def __init__(self, args):
+    def __init__(self, args, ctl):
         pyqtgraph.ImageView.__init__(self)
         self.args = args
 
-    def data_changed(self, data, mods):
+    def data_changed(self, value, metadata, persist, mods):
         try:
-            img = data[self.args.img][1]
+            img = value[self.args.img]
         except KeyError:
             return
         self.setImage(img)
@@ -8,20 +8,20 @@ from artiq.applets.simple import TitleApplet
 
 
 class HistogramPlot(pyqtgraph.PlotWidget):
-    def __init__(self, args):
+    def __init__(self, args, ctl):
         pyqtgraph.PlotWidget.__init__(self)
         self.args = args
         self.timer = QTimer()
         self.timer.setSingleShot(True)
         self.timer.timeout.connect(self.length_warning)
 
-    def data_changed(self, data, mods, title):
+    def data_changed(self, value, metadata, persist, mods, title):
         try:
-            y = data[self.args.y][1]
+            y = value[self.args.y]
             if self.args.x is None:
                 x = None
             else:
-                x = data[self.args.x][1]
+                x = value[self.args.x]
         except KeyError:
             return
         if x is None:
@@ -9,7 +9,7 @@ from artiq.applets.simple import TitleApplet
 
 
 class XYPlot(pyqtgraph.PlotWidget):
-    def __init__(self, args):
+    def __init__(self, args, ctl):
         pyqtgraph.PlotWidget.__init__(self)
         self.args = args
         self.timer = QTimer()
@@ -19,16 +19,16 @@ class XYPlot(pyqtgraph.PlotWidget):
             'Error bars': False,
             'Fit values': False}
 
-    def data_changed(self, data, mods, title):
+    def data_changed(self, value, metadata, persist, mods, title):
         try:
-            y = data[self.args.y][1]
+            y = value[self.args.y]
         except KeyError:
             return
-        x = data.get(self.args.x, (False, None))[1]
+        x = value.get(self.args.x, (False, None))
         if x is None:
             x = np.arange(len(y))
-        error = data.get(self.args.error, (False, None))[1]
-        fit = data.get(self.args.fit, (False, None))[1]
+        error = value.get(self.args.error, (False, None))
+        fit = value.get(self.args.fit, (False, None))
 
         if not len(y) or len(y) != len(x):
             self.mismatch['X values'] = True
@@ -22,7 +22,7 @@ def _compute_ys(histogram_bins, histograms_counts):
 # pyqtgraph.GraphicsWindow fails to behave like a regular Qt widget
 # and breaks embedding. Do not use as top widget.
 class XYHistPlot(QtWidgets.QSplitter):
-    def __init__(self, args):
+    def __init__(self, args, ctl):
         QtWidgets.QSplitter.__init__(self)
         self.resize(1000, 600)
         self.setWindowTitle("XY/Histogram")
@@ -124,11 +124,11 @@ class XYHistPlot(QtWidgets.QSplitter):
             return False
         return True
 
-    def data_changed(self, data, mods):
+    def data_changed(self, value, metadata, persist, mods):
         try:
-            xs = data[self.args.xs][1]
-            histogram_bins = data[self.args.histogram_bins][1]
-            histograms_counts = data[self.args.histograms_counts][1]
+            xs = value[self.args.xs]
+            histogram_bins = value[self.args.histogram_bins]
+            histograms_counts = value[self.args.histograms_counts]
         except KeyError:
             return
         if len(xs) != histograms_counts.shape[0]:
@@ -6,18 +6,18 @@ from artiq.applets.simple import SimpleApplet
 
 
 class ProgressWidget(QtWidgets.QProgressBar):
-    def __init__(self, args):
+    def __init__(self, args, ctl):
         QtWidgets.QProgressBar.__init__(self)
         self.setMinimum(args.min)
         self.setMaximum(args.max)
         self.dataset_value = args.value
 
-    def data_changed(self, data, mods):
+    def data_changed(self, value, metadata, persist, mods):
         try:
-            value = round(data[self.dataset_value][1])
+            val = round(value[self.dataset_value])
         except (KeyError, ValueError, TypeError):
-            value = 0
-        self.setValue(value)
+            val = 0
+        self.setValue(val)
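The applet diffs above all follow the same mechanical migration: the widget constructor gains a ``ctl`` argument and ``data_changed`` switches from the packed ``data`` dictionary to separate ``value``, ``metadata`` and ``persist`` dictionaries. A minimal sketch for a hypothetical custom widget (names are illustrative, not part of this commit):

::

    from PyQt5 import QtWidgets

    class MyLabel(QtWidgets.QLabel):
        # Old signatures were __init__(self, args) and data_changed(self, data, mods).
        def __init__(self, args, ctl):
            QtWidgets.QLabel.__init__(self)
            self.dataset = args.dataset
            self.ctl = ctl  # may be ignored if the widget never writes datasets

        def data_changed(self, value, metadata, persist, mods):
            # data[self.dataset][1] in the old API becomes value[self.dataset]
            self.setText(str(value[self.dataset]))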
@@ -7,6 +7,7 @@ import string
 from qasync import QEventLoop, QtWidgets, QtCore
 
 from sipyco.sync_struct import Subscriber, process_mod
+from sipyco.pc_rpc import AsyncioClient as RPCClient
 from sipyco import pyon
 from sipyco.pipe_ipc import AsyncioChildComm
 
@@ -14,6 +15,59 @@ from sipyco.pipe_ipc import AsyncioChildComm
 logger = logging.getLogger(__name__)
 
 
+class AppletControlIPC:
+    def __init__(self, ipc):
+        self.ipc = ipc
+
+    def set_dataset(self, key, value, unit=None, scale=None, precision=None, persist=None):
+        metadata = {}
+        if unit is not None:
+            metadata["unit"] = unit
+        if scale is not None:
+            metadata["scale"] = scale
+        if precision is not None:
+            metadata["precision"] = precision
+        self.ipc.set_dataset(key, value, metadata, persist)
+
+    def mutate_dataset(self, key, index, value):
+        mod = {"action": "setitem", "path": [key, 1], "key": index, "value": value}
+        self.ipc.update_dataset(mod)
+
+    def append_to_dataset(self, key, value):
+        mod = {"action": "append", "path": [key, 1], "x": value}
+        self.ipc.update_dataset(mod)
+
+
+class AppletControlRPC:
+    def __init__(self, loop, dataset_ctl):
+        self.loop = loop
+        self.dataset_ctl = dataset_ctl
+        self.background_tasks = set()
+
+    def _background(self, coro, *args):
+        task = self.loop.create_task(coro(*args))
+        self.background_tasks.add(task)
+        task.add_done_callback(self.background_tasks.discard)
+
+    def set_dataset(self, key, value, unit=None, scale=None, precision=None, persist=None):
+        metadata = {}
+        if unit is not None:
+            metadata["unit"] = unit
+        if scale is not None:
+            metadata["scale"] = scale
+        if precision is not None:
+            metadata["precision"] = precision
+        self._background(self.dataset_ctl.set, key, value, metadata=metadata, persist=persist)
+
+    def mutate_dataset(self, key, index, value):
+        mod = {"action": "setitem", "path": [key, 1], "key": index, "value": value}
+        self._background(self.dataset_ctl.update, mod)
+
+    def append_to_dataset(self, key, value):
+        mod = {"action": "append", "path": [key, 1], "x": value}
+        self._background(self.dataset_ctl.update, mod)
+
+
 class AppletIPCClient(AsyncioChildComm):
     def set_close_cb(self, close_cb):
         self.close_cb = close_cb
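Both control classes expose the same three calls, so a widget does not need to know whether it runs embedded (``AppletControlIPC``) or standalone (``AppletControlRPC``). The ``"path": [key, 1]`` in the mod dictionaries targets index 1 of the ``(persist, value, metadata)`` triple kept by the dataset publisher, i.e. the value itself. A rough sketch of a widget driving this API (widget and dataset names are hypothetical):

::

    import pyqtgraph

    class TracePlot(pyqtgraph.PlotWidget):
        def __init__(self, args, ctl):
            pyqtgraph.PlotWidget.__init__(self)
            self.dataset = args.dataset   # assumed --dataset command-line argument
            self.ctl = ctl                # AppletControlIPC or AppletControlRPC

        def reset_trace(self):
            # Replace the whole dataset and attach display metadata.
            self.ctl.set_dataset(self.dataset, [], unit="V", persist=False)

        def add_point(self, p):
            # Appends to the value slot (path [key, 1]) on the master side.
            self.ctl.append_to_dataset(self.dataset, p)

        def data_changed(self, value, metadata, persist, mods):
            self.plot(value[self.dataset], clear=True)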
@@ -64,13 +118,24 @@ class AppletIPCClient(AsyncioChildComm):
                          exc_info=True)
             self.close_cb()
 
-    def subscribe(self, datasets, init_cb, mod_cb, dataset_prefixes=[]):
+    def subscribe(self, datasets, init_cb, mod_cb, dataset_prefixes=[], *, loop):
         self.write_pyon({"action": "subscribe",
                          "datasets": datasets,
                          "dataset_prefixes": dataset_prefixes})
         self.init_cb = init_cb
         self.mod_cb = mod_cb
-        asyncio.ensure_future(self.listen())
+        self.listen_task = loop.create_task(self.listen())
+
+    def set_dataset(self, key, value, metadata, persist=None):
+        self.write_pyon({"action": "set_dataset",
+                         "key": key,
+                         "value": value,
+                         "metadata": metadata,
+                         "persist": persist})
+
+    def update_dataset(self, mod):
+        self.write_pyon({"action": "update_dataset",
+                         "mod": mod})
 
 
 class SimpleApplet:
@@ -92,8 +157,11 @@ class SimpleApplet:
                                "for dataset notifications "
                                "(ignored in embedded mode)")
         group.add_argument(
-            "--port", default=3250, type=int,
-            help="TCP port to connect to")
+            "--port-notify", default=3250, type=int,
+            help="TCP port to connect to for notifications (ignored in embedded mode)")
+        group.add_argument(
+            "--port-control", default=3251, type=int,
+            help="TCP port to connect to for control (ignored in embedded mode)")
 
         self._arggroup_datasets = self.argparser.add_argument_group("datasets")
 
@@ -132,8 +200,21 @@ class SimpleApplet:
         if self.embed is not None:
             self.ipc.close()
 
+    def ctl_init(self):
+        if self.embed is None:
+            dataset_ctl = RPCClient()
+            self.loop.run_until_complete(dataset_ctl.connect_rpc(
+                self.args.server, self.args.port_control, "master_dataset_db"))
+            self.ctl = AppletControlRPC(self.loop, dataset_ctl)
+        else:
+            self.ctl = AppletControlIPC(self.ipc)
+
+    def ctl_close(self):
+        if self.embed is None:
+            self.ctl.dataset_ctl.close_rpc()
+
     def create_main_widget(self):
-        self.main_widget = self.main_widget_class(self.args)
+        self.main_widget = self.main_widget_class(self.args, self.ctl)
         if self.embed is not None:
             self.ipc.set_close_cb(self.main_widget.close)
             if os.name == "nt":
@@ -189,7 +270,12 @@ class SimpleApplet:
         return False
 
     def emit_data_changed(self, data, mod_buffer):
-        self.main_widget.data_changed(data, mod_buffer)
+        persist = dict()
+        value = dict()
+        metadata = dict()
+        for k, d in data.items():
+            persist[k], value[k], metadata[k] = d
+        self.main_widget.data_changed(value, metadata, persist, mod_buffer)
 
     def flush_mod_buffer(self):
         self.emit_data_changed(self.data, self.mod_buffer)
@@ -204,7 +290,7 @@ class SimpleApplet:
                 self.mod_buffer.append(mod)
             else:
                 self.mod_buffer = [mod]
-                asyncio.get_event_loop().call_later(self.args.update_delay,
+                self.loop.call_later(self.args.update_delay,
                                      self.flush_mod_buffer)
         else:
             self.emit_data_changed(self.data, [mod])
@@ -214,10 +300,11 @@ class SimpleApplet:
             self.subscriber = Subscriber("datasets",
                                          self.sub_init, self.sub_mod)
             self.loop.run_until_complete(self.subscriber.connect(
-                self.args.server, self.args.port))
+                self.args.server, self.args.port_notify))
         else:
             self.ipc.subscribe(self.datasets, self.sub_init, self.sub_mod,
-                               dataset_prefixes=self.dataset_prefixes)
+                               dataset_prefixes=self.dataset_prefixes,
+                               loop=self.loop)
 
     def unsubscribe(self):
         if self.embed is None:
@@ -228,6 +315,8 @@ class SimpleApplet:
         self.qasync_init()
         try:
             self.ipc_init()
+            try:
+                self.ctl_init()
                 try:
                     self.create_main_widget()
                     self.subscribe()
@@ -235,6 +324,8 @@ class SimpleApplet:
                         self.loop.run_forever()
                     finally:
                         self.unsubscribe()
+            finally:
+                self.ctl_close()
         finally:
             self.ipc_close()
     finally:
@@ -273,4 +364,9 @@ class TitleApplet(SimpleApplet):
             title = self.args.title
         else:
             title = None
-        self.main_widget.data_changed(data, mod_buffer, title)
+        persist = dict()
+        value = dict()
+        metadata = dict()
+        for k, d in data.items():
+            persist[k], value[k], metadata[k] = d
+        self.main_widget.data_changed(value, metadata, persist, mod_buffer, title)
@@ -20,11 +20,46 @@ class Model(DictSyncTreeSepModel):
         DictSyncTreeSepModel.__init__(self, ".", ["Dataset", "Value"], init)
 
     def convert(self, k, v, column):
-        return short_format(v[1])
+        return short_format(v[1], v[2])
+
+
+class DatasetCtl:
+    def __init__(self, master_host, master_port):
+        self.master_host = master_host
+        self.master_port = master_port
+
+    async def _execute_rpc(self, op_name, key_or_mod, value=None, persist=None, metadata=None):
+        logger.info("Starting %s operation on %s", op_name, key_or_mod)
+        try:
+            remote = RPCClient()
+            await remote.connect_rpc(self.master_host, self.master_port,
+                                     "master_dataset_db")
+            try:
+                if op_name == "set":
+                    await remote.set(key_or_mod, value, persist, metadata)
+                elif op_name == "update":
+                    await remote.update(key_or_mod)
+                else:
+                    logger.error("Invalid operation: %s", op_name)
+                    return
+            finally:
+                remote.close_rpc()
+        except:
+            logger.error("Failed %s operation on %s", op_name,
+                         key_or_mod, exc_info=True)
+        else:
+            logger.info("Finished %s operation on %s", op_name,
+                        key_or_mod)
+
+    async def set(self, key, value, persist=None, metadata=None):
+        await self._execute_rpc("set", key, value, persist, metadata)
+
+    async def update(self, mod):
+        await self._execute_rpc("update", mod)
 
 
 class DatasetsDock(QtWidgets.QDockWidget):
-    def __init__(self, datasets_sub, master_host, master_port):
+    def __init__(self, dataset_sub, dataset_ctl):
         QtWidgets.QDockWidget.__init__(self, "Datasets")
         self.setObjectName("Datasets")
         self.setFeatures(QtWidgets.QDockWidget.DockWidgetMovable |
@@ -62,10 +97,9 @@ class DatasetsDock(QtWidgets.QDockWidget):
         self.table.addAction(upload_action)
 
         self.set_model(Model(dict()))
-        datasets_sub.add_setmodel_callback(self.set_model)
+        dataset_sub.add_setmodel_callback(self.set_model)
 
-        self.master_host = master_host
-        self.master_port = master_port
+        self.dataset_ctl = dataset_ctl
 
     def _search_datasets(self):
         if hasattr(self, "table_model_filter"):
@@ -82,30 +116,14 @@ class DatasetsDock(QtWidgets.QDockWidget):
         self.table_model_filter.setSourceModel(self.table_model)
         self.table.setModel(self.table_model_filter)
 
-    async def _upload_dataset(self, name, value,):
-        logger.info("Uploading dataset '%s' to master...", name)
-        try:
-            remote = RPCClient()
-            await remote.connect_rpc(self.master_host, self.master_port,
-                                     "master_dataset_db")
-            try:
-                await remote.set(name, value)
-            finally:
-                remote.close_rpc()
-        except:
-            logger.error("Failed uploading dataset '%s'",
-                         name, exc_info=True)
-        else:
-            logger.info("Finished uploading dataset '%s'", name)
-
     def upload_clicked(self):
         idx = self.table.selectedIndexes()
         if idx:
             idx = self.table_model_filter.mapToSource(idx[0])
             key = self.table_model.index_to_key(idx)
             if key is not None:
-                persist, value = self.table_model.backing_store[key]
-                asyncio.ensure_future(self._upload_dataset(key, value))
+                persist, value, metadata = self.table_model.backing_store[key]
+                asyncio.ensure_future(self.dataset_ctl.set(key, value, metadata=metadata))
 
     def save_state(self):
         return bytes(self.table.header().saveState())
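With the upload path factored out into ``DatasetCtl``, any asyncio context in the browser can push a dataset, together with the metadata recovered from the HDF5 attributes, back to the master. A usage sketch, assuming the class lives in ``artiq.browser.datasets``, a master reachable on localhost, and an illustrative key and value:

::

    import asyncio
    from artiq.browser.datasets import DatasetCtl

    async def upload_example():
        # 3251 is the default master control port used elsewhere in this commit.
        ctl = DatasetCtl("localhost", 3251)
        await ctl.set("scan/result", [1.0, 2.0, 3.0], metadata={"unit": "V"})

    asyncio.run(upload_example())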
@@ -378,9 +378,9 @@ class _ExperimentDock(QtWidgets.QMdiSubWindow):
 
 
 class LocalDatasetDB:
-    def __init__(self, datasets_sub):
-        self.datasets_sub = datasets_sub
-        datasets_sub.add_setmodel_callback(self.init)
+    def __init__(self, dataset_sub):
+        self.dataset_sub = dataset_sub
+        dataset_sub.add_setmodel_callback(self.init)
 
     def init(self, data):
         self._data = data
@@ -389,11 +389,11 @@ class LocalDatasetDB:
         return self._data.backing_store[key][1]
 
     def update(self, mod):
-        self.datasets_sub.update(mod)
+        self.dataset_sub.update(mod)
 
 
 class ExperimentsArea(QtWidgets.QMdiArea):
-    def __init__(self, root, datasets_sub):
+    def __init__(self, root, dataset_sub):
         QtWidgets.QMdiArea.__init__(self)
         self.pixmap = QtGui.QPixmap(os.path.join(
             artiq_dir, "gui", "logo_ver.svg"))
@@ -402,7 +402,7 @@ class ExperimentsArea(QtWidgets.QMdiArea):
 
         self.open_experiments = []
 
-        self._ddb = LocalDatasetDB(datasets_sub)
+        self._ddb = LocalDatasetDB(dataset_sub)
 
         self.worker_handlers = {
             "get_device_db": lambda: {},
@@ -194,7 +194,9 @@ class FilesDock(QtWidgets.QDockWidget):
         if "archive" in f:
             def visitor(k, v):
                 if isinstance(v, h5py.Dataset):
-                    rd[k] = (True, v[()])
+                    # v.attrs is a non-serializable h5py.AttributeManager, need to convert to dict
+                    # See https://docs.h5py.org/en/stable/high/attr.html#h5py.AttributeManager
+                    rd[k] = (True, v[()], dict(v.attrs))
 
             f["archive"].visititems(visitor)
 
@@ -204,7 +206,9 @@ class FilesDock(QtWidgets.QDockWidget):
                     if k in rd:
                         logger.warning("dataset '%s' is both in archive "
                                        "and outputs", k)
-                    rd[k] = (True, v[()])
+                    # v.attrs is a non-serializable h5py.AttributeManager, need to convert to dict
+                    # See https://docs.h5py.org/en/stable/high/attr.html#h5py.AttributeManager
+                    rd[k] = (True, v[()], dict(v.attrs))
 
             f["datasets"].visititems(visitor)
 
@@ -59,7 +59,6 @@ def build_artiq_soc(soc, argdict):
     builder.software_packages = []
     builder.add_software_package("bootloader", os.path.join(firmware_dir, "bootloader"))
     is_kasli_v1 = isinstance(soc.platform, kasli.Platform) and soc.platform.hw_rev in ("v1.0", "v1.1")
-    if isinstance(soc, AMPSoC):
     kernel_cpu_type = "vexriscv" if is_kasli_v1 else "vexriscv-g"
     builder.add_software_package("libm", cpu_type=kernel_cpu_type)
     builder.add_software_package("libprintf", cpu_type=kernel_cpu_type)
@@ -69,6 +68,7 @@ def build_artiq_soc(soc, argdict):
     # If the kernel lacks FPU, then the runtime unwinder is already generated
     if not is_kasli_v1:
         builder.add_software_package("libunwind")
+    if not soc.config["DRTIO_ROLE"] == "satellite":
         builder.add_software_package("runtime", os.path.join(firmware_dir, "runtime"))
     else:
         # Assume DRTIO satellite.
@@ -19,16 +19,24 @@
         },
         "min_artiq_version": {
             "type": "string",
-            "description": "Minimum required ARTIQ version"
+            "description": "Minimum required ARTIQ version",
+            "default": "0"
         },
         "hw_rev": {
             "type": "string",
             "description": "Hardware revision"
         },
         "base": {
+            "type": "string",
+            "enum": ["use_drtio_role", "standalone", "master", "satellite"],
+            "description": "Deprecated, use drtio_role instead",
+            "default": "use_drtio_role"
+        },
+        "drtio_role": {
             "type": "string",
             "enum": ["standalone", "master", "satellite"],
-            "description": "SoC base; value depends on intended system topology"
+            "description": "Role that this device takes in a DRTIO network; 'standalone' means no DRTIO",
+            "default": "standalone"
         },
         "ext_ref_frequency": {
             "type": "number",
@@ -204,7 +212,8 @@
                 "type": "object",
                 "properties": {
                     "name": {
-                        "type": "string"
+                        "type": "string",
+                        "default": "dio_spi"
                     },
                     "clk": {
                         "type": "integer",
@@ -240,7 +249,8 @@
                 "type": "object",
                 "properties": {
                     "name": {
-                        "type": "string"
+                        "type": "string",
+                        "default": "ttl"
                     },
                     "pin": {
                         "type": "integer",
@@ -257,7 +267,8 @@
                     }
                 },
                 "required": ["pin", "direction"]
-            }
+            },
+            "default": []
         }
     },
     "required": ["ports", "spi"]
@@ -297,7 +308,8 @@
                 "clk_div": {
                     "type": "integer",
                     "minimum": 0,
-                    "maximum": 3
+                    "maximum": 3,
+                    "default": 0
                 },
                 "pll_n": {
                     "type": "integer"
@@ -338,7 +350,8 @@
                 },
                 "sampler_hw_rev": {
                     "type": "string",
-                    "pattern": "^v[0-9]+\\.[0-9]+"
+                    "pattern": "^v[0-9]+\\.[0-9]+",
+                    "default": "v2.2"
                 },
                 "urukul0_ports": {
                     "type": "array",
@@ -518,6 +531,11 @@
                     },
                     "minItems": 1,
                     "maxItems": 1
+                },
+                "mode": {
+                    "type": "string",
+                    "enum": ["base", "miqro"],
+                    "default": "base"
                 }
             },
             "required": ["ports"]
@@ -32,4 +32,7 @@ def load(description_path):
     global validator
     validator.validate(result)
 
+    if result["base"] != "use_drtio_role":
+        result["drtio_role"] = result["base"]
+
     return result
@@ -14,14 +14,14 @@ from artiq.gui.scientific_spinbox import ScientificSpinBox
 logger = logging.getLogger(__name__)
 
 
-async def rename(key, new_key, value, persist, dataset_ctl):
+async def rename(key, new_key, value, metadata, persist, dataset_ctl):
     if key != new_key:
         await dataset_ctl.delete(key)
-    await dataset_ctl.set(new_key, value, persist)
+    await dataset_ctl.set(new_key, value, metadata=metadata, persist=persist)
 
 
 class CreateEditDialog(QtWidgets.QDialog):
-    def __init__(self, parent, dataset_ctl, key=None, value=None, persist=False):
+    def __init__(self, parent, dataset_ctl, key=None, value=None, metadata=None, persist=False):
         QtWidgets.QDialog.__init__(self, parent=parent)
         self.dataset_ctl = dataset_ctl
 
@@ -43,9 +43,21 @@ class CreateEditDialog(QtWidgets.QDialog):
         grid.addWidget(self.data_type, 1, 2)
         self.value_widget.textChanged.connect(self.dtype)
 
-        grid.addWidget(QtWidgets.QLabel("Persist:"), 2, 0)
+        grid.addWidget(QtWidgets.QLabel("Unit:"), 2, 0)
+        self.unit_widget = QtWidgets.QLineEdit()
+        grid.addWidget(self.unit_widget, 2, 1)
+
+        grid.addWidget(QtWidgets.QLabel("Scale:"), 3, 0)
+        self.scale_widget = QtWidgets.QLineEdit()
+        grid.addWidget(self.scale_widget, 3, 1)
+
+        grid.addWidget(QtWidgets.QLabel("Precision:"), 4, 0)
+        self.precision_widget = QtWidgets.QLineEdit()
+        grid.addWidget(self.precision_widget, 4, 1)
+
+        grid.addWidget(QtWidgets.QLabel("Persist:"), 5, 0)
         self.box_widget = QtWidgets.QCheckBox()
-        grid.addWidget(self.box_widget, 2, 1)
+        grid.addWidget(self.box_widget, 5, 1)
 
         self.ok = QtWidgets.QPushButton('&Ok')
         self.ok.setEnabled(False)
@@ -55,24 +67,40 @@ class CreateEditDialog(QtWidgets.QDialog):
             self.ok, QtWidgets.QDialogButtonBox.AcceptRole)
         self.buttons.addButton(
             self.cancel, QtWidgets.QDialogButtonBox.RejectRole)
-        grid.setRowStretch(3, 1)
-        grid.addWidget(self.buttons, 4, 0, 1, 3, alignment=QtCore.Qt.AlignHCenter)
+        grid.setRowStretch(6, 1)
+        grid.addWidget(self.buttons, 7, 0, 1, 3, alignment=QtCore.Qt.AlignHCenter)
         self.buttons.accepted.connect(self.accept)
         self.buttons.rejected.connect(self.reject)
 
         self.key = key
         self.name_widget.setText(key)
         self.value_widget.setText(value)
+
+        if metadata is not None:
+            self.unit_widget.setText(metadata.get('unit', ''))
+            self.scale_widget.setText(str(metadata.get('scale', '')))
+            self.precision_widget.setText(str(metadata.get('precision', '')))
+
         self.box_widget.setChecked(persist)
 
     def accept(self):
         key = self.name_widget.text()
         value = self.value_widget.text()
         persist = self.box_widget.isChecked()
+        unit = self.unit_widget.text()
+        scale = self.scale_widget.text()
+        precision = self.precision_widget.text()
+        metadata = {}
+        if unit != "":
+            metadata['unit'] = unit
+        if scale != "":
+            metadata['scale'] = float(scale)
+        if precision != "":
+            metadata['precision'] = int(precision)
         if self.key and self.key != key:
-            asyncio.ensure_future(exc_to_warning(rename(self.key, key, pyon.decode(value), persist, self.dataset_ctl)))
+            asyncio.ensure_future(exc_to_warning(rename(self.key, key, pyon.decode(value), metadata, persist, self.dataset_ctl)))
         else:
-            asyncio.ensure_future(exc_to_warning(self.dataset_ctl.set(key, pyon.decode(value), persist)))
+            asyncio.ensure_future(exc_to_warning(self.dataset_ctl.set(key, pyon.decode(value), metadata=metadata, persist=persist)))
         self.key = key
         QtWidgets.QDialog.accept(self)
 
@@ -100,13 +128,13 @@ class Model(DictSyncTreeSepModel):
         if column == 1:
             return "Y" if v[0] else "N"
         elif column == 2:
-            return short_format(v[1])
+            return short_format(v[1], v[2])
         else:
             raise ValueError
 
 
 class DatasetsDock(QtWidgets.QDockWidget):
-    def __init__(self, datasets_sub, dataset_ctl):
+    def __init__(self, dataset_sub, dataset_ctl):
         QtWidgets.QDockWidget.__init__(self, "Datasets")
         self.setObjectName("Datasets")
         self.setFeatures(QtWidgets.QDockWidget.DockWidgetMovable |
@@ -146,7 +174,7 @@ class DatasetsDock(QtWidgets.QDockWidget):
         self.table.addAction(delete_action)
 
         self.table_model = Model(dict())
-        datasets_sub.add_setmodel_callback(self.set_model)
+        dataset_sub.add_setmodel_callback(self.set_model)
 
     def _search_datasets(self):
         if hasattr(self, "table_model_filter"):
@@ -168,7 +196,7 @@ class DatasetsDock(QtWidgets.QDockWidget):
             idx = self.table_model_filter.mapToSource(idx[0])
             key = self.table_model.index_to_key(idx)
             if key is not None:
-                persist, value = self.table_model.backing_store[key]
+                persist, value, metadata = self.table_model.backing_store[key]
                 t = type(value)
                 if np.issubdtype(t, np.number) or np.issubdtype(t, np.bool_):
                     value = str(value)
@@ -176,7 +204,7 @@ class DatasetsDock(QtWidgets.QDockWidget):
                     value = '"{}"'.format(str(value))
                 else:
                     value = pyon.encode(value)
-                CreateEditDialog(self, self.dataset_ctl, key, value, persist).open()
+                CreateEditDialog(self, self.dataset_ctl, key, value, metadata, persist).open()
 
     def delete_clicked(self):
         idx = self.table.selectedIndexes()
@@ -268,7 +268,7 @@ class _ExperimentDock(QtWidgets.QMdiSubWindow):
             datetime.setDate(QtCore.QDate.currentDate())
         else:
             datetime.setDateTime(QtCore.QDateTime.fromMSecsSinceEpoch(
-                scheduling["due_date"]*1000))
+                int(scheduling["due_date"]*1000)))
         datetime_en.setChecked(scheduling["due_date"] is not None)
 
         def update_datetime(dt):
@@ -18,7 +18,9 @@ use board_misoc::slave_fpga;
 use board_misoc::{clock, ethmac, net_settings};
 use board_misoc::uart_console::Console;
 use riscv::register::{mcause, mepc, mtval};
+#[cfg(has_ethmac)]
 use smoltcp::iface::{Routes, SocketStorage};
+#[cfg(has_ethmac)]
 use smoltcp::wire::{HardwareAddress, IpAddress, Ipv4Address, Ipv6Address};
 
 fn check_integrity() -> bool {
@@ -1,4 +1,6 @@
 use io::{Write, Error as IoError};
+#[cfg(has_drtio)]
+use alloc::vec::Vec;
 use board_misoc::{csr, cache};
 use sched::{Io, Mutex, TcpListener, TcpStream, Error as SchedError};
 use analyzer_proto::*;
@@ -42,7 +44,6 @@ fn disarm() {
 pub mod remote_analyzer {
     use super::*;
     use rtio_mgt::drtio;
-    use alloc::vec::Vec;
 
     pub struct RemoteBuffer {
         pub total_byte_count: u64,
@@ -1,3 +1,4 @@
+use core::cmp::min;
 use board_misoc::{csr, cache};
 use proto_artiq::drtioaux_proto::ANALYZER_MAX_SIZE;
 
@@ -35,8 +36,9 @@ fn disarm() {
 
 pub struct Analyzer {
     // necessary for keeping track of sent data
+    data_len: usize,
     sent_bytes: usize,
-    data_iter: usize
+    data_pointer: usize
 }
 
 pub struct Header {
@@ -50,48 +52,53 @@ pub struct AnalyzerSliceMeta {
     pub last: bool
 }
 
+impl Drop for Analyzer {
+    fn drop(&mut self) {
+        disarm();
+    }
+}
+
 impl Analyzer {
     pub fn new() -> Analyzer {
         // create and arm new Analyzer
         arm();
         Analyzer {
+            data_len: 0,
             sent_bytes: 0,
-            data_iter: 0
+            data_pointer: 0
         }
     }
 
-    fn drop(&mut self) {
-        disarm();
-    }
-
     pub fn get_header(&mut self) -> Header {
         disarm();
 
         let overflow = unsafe { csr::rtio_analyzer::message_encoder_overflow_read() != 0 };
         let total_byte_count = unsafe { csr::rtio_analyzer::dma_byte_count_read() };
         let wraparound = total_byte_count >= BUFFER_SIZE as u64;
-        self.sent_bytes = if wraparound { BUFFER_SIZE } else { total_byte_count as usize };
-        self.data_iter = if wraparound { (total_byte_count % BUFFER_SIZE as u64) as usize } else { 0 };
+        self.data_len = if wraparound { BUFFER_SIZE } else { total_byte_count as usize };
+        self.data_pointer = if wraparound { (total_byte_count % BUFFER_SIZE as u64) as usize } else { 0 };
+        self.sent_bytes = 0;
 
         Header {
             total_byte_count: total_byte_count,
-            sent_bytes: self.sent_bytes as u32,
+            sent_bytes: self.data_len as u32,
            overflow: overflow
         }
     }
 
     pub fn get_data(&mut self, data_slice: &mut [u8; ANALYZER_MAX_SIZE]) -> AnalyzerSliceMeta {
         let data = unsafe { &BUFFER.data[..] };
-        let i = self.data_iter;
-        let len = if i + ANALYZER_MAX_SIZE < self.sent_bytes { ANALYZER_MAX_SIZE } else { self.sent_bytes - i };
-        let last = i + len == self.sent_bytes;
+        let i = (self.data_pointer + self.sent_bytes) % BUFFER_SIZE;
+        let len = min(ANALYZER_MAX_SIZE, self.data_len - self.sent_bytes);
+        let last = self.sent_bytes + len == self.data_len;
 
         if i + len >= BUFFER_SIZE {
-            data_slice[..len].clone_from_slice(&data[i..BUFFER_SIZE]);
-            data_slice[..len].clone_from_slice(&data[..(i+len) % BUFFER_SIZE]);
+            data_slice[..(BUFFER_SIZE-i)].clone_from_slice(&data[i..BUFFER_SIZE]);
+            data_slice[(BUFFER_SIZE-i)..len].clone_from_slice(&data[..(i + len) % BUFFER_SIZE]);
         } else {
             data_slice[..len].clone_from_slice(&data[i..i+len]);
         }
-        self.data_iter += len;
+        self.sent_bytes += len;
 
         if last {
             arm();
@@ -48,8 +48,8 @@ def get_argparser():
 
 
 class Browser(QtWidgets.QMainWindow):
-    def __init__(self, smgr, datasets_sub, browse_root,
-                 master_host, master_port, *, loop=None):
+    def __init__(self, smgr, dataset_sub, dataset_ctl, browse_root,
+                 *, loop=None):
         QtWidgets.QMainWindow.__init__(self)
         smgr.register(self)
 
@@ -65,7 +65,7 @@ class Browser(QtWidgets.QMainWindow):
         self.setUnifiedTitleAndToolBarOnMac(True)
 
         self.experiments = experiments.ExperimentsArea(
-            browse_root, datasets_sub)
+            browse_root, dataset_sub)
         smgr.register(self.experiments)
         self.experiments.setHorizontalScrollBarPolicy(
             QtCore.Qt.ScrollBarAsNeeded)
@@ -73,7 +73,7 @@ class Browser(QtWidgets.QMainWindow):
             QtCore.Qt.ScrollBarAsNeeded)
         self.setCentralWidget(self.experiments)
 
-        self.files = files.FilesDock(datasets_sub, browse_root)
+        self.files = files.FilesDock(dataset_sub, browse_root)
         smgr.register(self.files)
 
         self.files.dataset_activated.connect(
@@ -81,12 +81,11 @@ class Browser(QtWidgets.QMainWindow):
         self.files.dataset_changed.connect(
             self.experiments.dataset_changed)
 
-        self.applets = applets.AppletsDock(self, datasets_sub, loop=loop)
+        self.applets = applets.AppletsDock(self, dataset_sub, dataset_ctl, loop=loop)
         smgr.register(self.applets)
         atexit_register_coroutine(self.applets.stop, loop=loop)
 
-        self.datasets = datasets.DatasetsDock(
-            datasets_sub, master_host, master_port)
+        self.datasets = datasets.DatasetsDock(dataset_sub, dataset_ctl)
         smgr.register(self.datasets)
         self.files.metadata_changed.connect(self.datasets.metadata_changed)
 
@@ -146,13 +145,14 @@ def main():
     asyncio.set_event_loop(loop)
     atexit.register(loop.close)
 
-    datasets_sub = models.LocalModelManager(datasets.Model)
-    datasets_sub.init({})
+    dataset_sub = models.LocalModelManager(datasets.Model)
+    dataset_sub.init({})
 
     smgr = state.StateManager(args.db_file)
 
-    browser = Browser(smgr, datasets_sub, args.browse_root,
-                      args.server, args.port, loop=loop)
+    dataset_ctl = datasets.DatasetCtl(args.server, args.port)
+    browser = Browser(smgr, dataset_sub, dataset_ctl, args.browse_root,
+                      loop=loop)
     widget_log_handler.callback = browser.log.model.append
 
     if os.name == "nt":
@@ -91,6 +91,12 @@ def get_argparser():
                                     help="name of the dataset")
     parser_set_dataset.add_argument("value", metavar="VALUE",
                                     help="value in PYON format")
+    parser_set_dataset.add_argument("--unit", default=None, type=str,
+                                    help="physical unit of the dataset")
+    parser_set_dataset.add_argument("--scale", default=None, type=float,
+                                    help="factor to multiply value of dataset in displays")
+    parser_set_dataset.add_argument("--precision", default=None, type=int,
+                                    help="maximum number of decimals to print in displays")
 
     persist_group = parser_set_dataset.add_mutually_exclusive_group()
     persist_group.add_argument("-p", "--persist", action="store_true",
@@ -174,7 +180,14 @@ def _action_set_dataset(remote, args):
         persist = True
     if args.no_persist:
         persist = False
-    remote.set(args.name, pyon.decode(args.value), persist)
+    metadata = {}
+    if args.unit is not None:
+        metadata["unit"] = args.unit
+    if args.scale is not None:
+        metadata["scale"] = args.scale
+    if args.precision is not None:
+        metadata["precision"] = args.precision
+    remote.set(args.name, pyon.decode(args.value), persist, metadata)
 
 
 def _action_del_dataset(remote, args):
@@ -246,8 +259,8 @@ def _show_devices(devices):
 def _show_datasets(datasets):
     clear_screen()
     table = PrettyTable(["Dataset", "Persistent", "Value"])
-    for k, (persist, value) in sorted(datasets.items(), key=itemgetter(0)):
-        table.add_row([k, "Y" if persist else "N", short_format(value)])
+    for k, (persist, value, metadata) in sorted(datasets.items(), key=itemgetter(0)):
+        table.add_row([k, "Y" if persist else "N", short_format(value, metadata)])
     print(table)
 
 
@@ -327,7 +340,7 @@ def main():
         "scan_devices": "master_device_db",
         "scan_repository": "master_experiment_db",
         "ls": "master_experiment_db",
-        "terminate": "master_terminate",
+        "terminate": "master_management",
     }[action]
     remote = Client(args.server, port, target_name)
     try:
@@ -125,11 +125,11 @@ def main():
         atexit.register(client.close_rpc)
         rpc_clients[target] = client
 
-    config = Client(args.server, args.port_control, "master_config")
+    master_management = Client(args.server, args.port_control, "master_management")
     try:
-        server_name = config.get_name()
+        server_name = master_management.get_name()
     finally:
-        config.close_rpc()
+        master_management.close_rpc()
 
     disconnect_reported = False
     def report_disconnect():
@@ -191,6 +191,7 @@ def main():
 
     d_applets = applets_ccb.AppletsCCBDock(main_window,
                                            sub_clients["datasets"],
+                                           rpc_clients["dataset_db"],
                                            extra_substitutes={
                                                "server": args.server,
                                                "port_notify": args.port_notify,
@@ -79,10 +79,10 @@ def process_header(output, description):
 
 
 class PeripheralManager:
-    def __init__(self, output, master_description):
+    def __init__(self, output, primary_description):
         self.counts = defaultdict(int)
         self.output = output
-        self.master_description = master_description
+        self.primary_description = primary_description
 
     def get_name(self, ty):
         count = self.counts[ty]
@@ -115,7 +115,7 @@ class PeripheralManager:
                 name=name[i],
                 class_name=classes[i // 4],
                 channel=rtio_offset + next(channel))
-        if peripheral.get("edge_counter", False):
+        if peripheral["edge_counter"]:
             for i in range(num_channels):
                 class_name = classes[i // 4]
                 if class_name == "TTLInOut":
@@ -140,14 +140,14 @@ class PeripheralManager:
                 "class": "SPIMaster",
                 "arguments": {{"channel": 0x{channel:06x}}}
             }}""",
-                name=self.get_name(spi.get("name", "dio_spi")),
+                name=self.get_name(spi["name"]),
                 channel=rtio_offset + next(channel))
-        for ttl in peripheral.get("ttl", []):
+        for ttl in peripheral["ttl"]:
             ttl_class_names = {
                 "input": "TTLInOut",
                 "output": "TTLOut"
             }
-            name = self.get_name(ttl.get("name", "ttl"))
+            name = self.get_name(ttl["name"])
            self.gen("""
             device_db["{name}"] = {{
                 "type": "local",
@@ -158,7 +158,7 @@ class PeripheralManager:
                 name=name,
                 class_name=ttl_class_names[ttl["direction"]],
                 channel=rtio_offset + next(channel))
-            if ttl.get("edge_counter", False):
+            if ttl["edge_counter"]:
                 self.gen("""
                 device_db["{name}_counter"] = {{
                     "type": "local",
@ -232,13 +232,15 @@ class PeripheralManager:
|
||||||
"sync_device": {sync_device},
|
"sync_device": {sync_device},
|
||||||
"io_update_device": "ttl_{name}_io_update",
|
"io_update_device": "ttl_{name}_io_update",
|
||||||
"refclk": {refclk},
|
"refclk": {refclk},
|
||||||
"clk_sel": {clk_sel}
|
"clk_sel": {clk_sel},
|
||||||
|
"clk_div": {clk_div}
|
||||||
}}
|
}}
|
||||||
}}""",
|
}}""",
|
||||||
name=urukul_name,
|
name=urukul_name,
|
||||||
sync_device="\"ttl_{name}_sync\"".format(name=urukul_name) if synchronization else "None",
|
sync_device="\"ttl_{name}_sync\"".format(name=urukul_name) if synchronization else "None",
|
||||||
refclk=peripheral.get("refclk", self.master_description["rtio_frequency"]),
|
refclk=peripheral.get("refclk", self.primary_description["rtio_frequency"]),
|
||||||
clk_sel=peripheral["clk_sel"])
|
clk_sel=peripheral["clk_sel"],
|
||||||
|
clk_div=peripheral["clk_div"])
|
||||||
dds = peripheral["dds"]
|
dds = peripheral["dds"]
|
||||||
pll_vco = peripheral.get("pll_vco")
|
pll_vco = peripheral.get("pll_vco")
|
||||||
for i in range(4):
|
for i in range(4):
|
||||||
|
@ -260,7 +262,7 @@ class PeripheralManager:
|
||||||
uchn=i,
|
uchn=i,
|
||||||
sw=",\n \"sw_device\": \"ttl_{name}_sw{uchn}\"".format(name=urukul_name, uchn=i) if len(peripheral["ports"]) > 1 else "",
|
sw=",\n \"sw_device\": \"ttl_{name}_sw{uchn}\"".format(name=urukul_name, uchn=i) if len(peripheral["ports"]) > 1 else "",
|
||||||
pll_vco=",\n \"pll_vco\": {}".format(pll_vco) if pll_vco is not None else "",
|
pll_vco=",\n \"pll_vco\": {}".format(pll_vco) if pll_vco is not None else "",
|
||||||
pll_n=peripheral.get("pll_n", 32), pll_en=peripheral.get("pll_en", True),
|
pll_n=peripheral.get("pll_n", 32), pll_en=peripheral["pll_en"],
|
||||||
sync_delay_seed=",\n \"sync_delay_seed\": \"eeprom_{}:{}\"".format(urukul_name, 64 + 4*i) if synchronization else "",
|
sync_delay_seed=",\n \"sync_delay_seed\": \"eeprom_{}:{}\"".format(urukul_name, 64 + 4*i) if synchronization else "",
|
||||||
io_update_delay=",\n \"io_update_delay\": \"eeprom_{}:{}\"".format(urukul_name, 64 + 4*i) if synchronization else "")
|
io_update_delay=",\n \"io_update_delay\": \"eeprom_{}:{}\"".format(urukul_name, 64 + 4*i) if synchronization else "")
|
||||||
elif dds == "ad9912":
|
elif dds == "ad9912":
|
||||||
|
@ -281,7 +283,7 @@ class PeripheralManager:
|
||||||
uchn=i,
|
uchn=i,
|
||||||
sw=",\n \"sw_device\": \"ttl_{name}_sw{uchn}\"".format(name=urukul_name, uchn=i) if len(peripheral["ports"]) > 1 else "",
|
sw=",\n \"sw_device\": \"ttl_{name}_sw{uchn}\"".format(name=urukul_name, uchn=i) if len(peripheral["ports"]) > 1 else "",
|
||||||
pll_vco=",\n \"pll_vco\": {}".format(pll_vco) if pll_vco is not None else "",
|
pll_vco=",\n \"pll_vco\": {}".format(pll_vco) if pll_vco is not None else "",
|
||||||
pll_n=peripheral.get("pll_n", 8), pll_en=peripheral.get("pll_en", True))
|
pll_n=peripheral.get("pll_n", 8), pll_en=peripheral["pll_en"])
|
||||||
else:
|
else:
|
||||||
raise ValueError
|
raise ValueError
|
||||||
return next(channel)
|
return next(channel)
|
||||||
|
@ -441,7 +443,7 @@ class PeripheralManager:
|
||||||
}}""",
|
}}""",
|
||||||
suservo_name=suservo_name,
|
suservo_name=suservo_name,
|
||||||
sampler_name=sampler_name,
|
sampler_name=sampler_name,
|
||||||
sampler_hw_rev=peripheral.get("sampler_hw_rev", "v2.2"),
|
sampler_hw_rev=peripheral["sampler_hw_rev"],
|
||||||
cpld_names_list=[urukul_name + "_cpld" for urukul_name in urukul_names],
|
cpld_names_list=[urukul_name + "_cpld" for urukul_name in urukul_names],
|
||||||
dds_names_list=[urukul_name + "_dds" for urukul_name in urukul_names],
|
dds_names_list=[urukul_name + "_dds" for urukul_name in urukul_names],
|
||||||
suservo_channel=rtio_offset+next(channel))
|
suservo_channel=rtio_offset+next(channel))
|
||||||
|
@ -486,10 +488,10 @@ class PeripheralManager:
|
||||||
}}""",
|
}}""",
|
||||||
urukul_name=urukul_name,
|
urukul_name=urukul_name,
|
||||||
urukul_channel=rtio_offset+next(channel),
|
urukul_channel=rtio_offset+next(channel),
|
||||||
refclk=peripheral.get("refclk", self.master_description["rtio_frequency"]),
|
refclk=peripheral.get("refclk", self.primary_description["rtio_frequency"]),
|
||||||
clk_sel=peripheral["clk_sel"],
|
clk_sel=peripheral["clk_sel"],
|
||||||
pll_vco=",\n \"pll_vco\": {}".format(pll_vco) if pll_vco is not None else "",
|
pll_vco=",\n \"pll_vco\": {}".format(pll_vco) if pll_vco is not None else "",
|
||||||
pll_n=peripheral["pll_n"], pll_en=peripheral.get("pll_en", True))
|
pll_n=peripheral["pll_n"], pll_en=peripheral["pll_en"])
|
||||||
return next(channel)
|
return next(channel)
|
||||||
|
|
||||||
def process_zotino(self, rtio_offset, peripheral):
|
def process_zotino(self, rtio_offset, peripheral):
|
||||||
|
@ -554,7 +556,7 @@ class PeripheralManager:
|
||||||
return 1
|
return 1
|
||||||
|
|
||||||
def process_phaser(self, rtio_offset, peripheral):
|
def process_phaser(self, rtio_offset, peripheral):
|
||||||
mode = peripheral.get("mode", "base")
|
mode = peripheral["mode"]
|
||||||
if mode == "miqro":
|
if mode == "miqro":
|
||||||
dac = f', "dac": {{"pll_m": 16, "pll_n": 3, "interpolation": 2}}, "gw_rev": {PHASER_GW_MIQRO}'
|
dac = f', "dac": {{"pll_m": 16, "pll_n": 3, "interpolation": 2}}, "gw_rev": {PHASER_GW_MIQRO}'
|
||||||
n_channels = 3
|
n_channels = 3
|
||||||
|
@ -609,30 +611,30 @@ class PeripheralManager:
|
||||||
return 2
|
return 2
|
||||||
|
|
||||||
|
|
||||||
def process(output, master_description, satellites):
|
def process(output, primary_description, satellites):
|
||||||
base = master_description["base"]
|
drtio_role = primary_description["drtio_role"]
|
||||||
if base not in ("standalone", "master"):
|
if drtio_role not in ("standalone", "master"):
|
||||||
raise ValueError("Invalid master base")
|
raise ValueError("Invalid primary node DRTIO role")
|
||||||
|
|
||||||
if base == "standalone" and satellites:
|
if drtio_role == "standalone" and satellites:
|
||||||
raise ValueError("A standalone system cannot have satellites")
|
raise ValueError("A standalone system cannot have satellites")
|
||||||
|
|
||||||
process_header(output, master_description)
|
process_header(output, primary_description)
|
||||||
|
|
||||||
pm = PeripheralManager(output, master_description)
|
pm = PeripheralManager(output, primary_description)
|
||||||
|
|
||||||
print("# {} peripherals".format(base), file=output)
|
print("# {} peripherals".format(drtio_role), file=output)
|
||||||
rtio_offset = 0
|
rtio_offset = 0
|
||||||
for peripheral in master_description["peripherals"]:
|
for peripheral in primary_description["peripherals"]:
|
||||||
n_channels = pm.process(rtio_offset, peripheral)
|
n_channels = pm.process(rtio_offset, peripheral)
|
||||||
rtio_offset += n_channels
|
rtio_offset += n_channels
|
||||||
if base == "standalone":
|
if drtio_role == "standalone":
|
||||||
n_channels = pm.add_board_leds(rtio_offset)
|
n_channels = pm.add_board_leds(rtio_offset)
|
||||||
rtio_offset += n_channels
|
rtio_offset += n_channels
|
||||||
|
|
||||||
for destination, description in satellites:
|
for destination, description in satellites:
|
||||||
if description["base"] != "satellite":
|
if description["drtio_role"] != "satellite":
|
||||||
raise ValueError("Invalid base for satellite at destination {}".format(destination))
|
raise ValueError("Invalid DRTIO role for satellite at destination {}".format(destination))
|
||||||
|
|
||||||
print("# DEST#{} peripherals".format(destination), file=output)
|
print("# DEST#{} peripherals".format(destination), file=output)
|
||||||
rtio_offset = destination << 16
|
rtio_offset = destination << 16
|
||||||
|
@ -647,8 +649,8 @@ def main():
|
||||||
parser.add_argument("--version", action="version",
|
parser.add_argument("--version", action="version",
|
||||||
version="ARTIQ v{}".format(artiq_version),
|
version="ARTIQ v{}".format(artiq_version),
|
||||||
help="print the ARTIQ version number")
|
help="print the ARTIQ version number")
|
||||||
parser.add_argument("master_description", metavar="MASTER_DESCRIPTION",
|
parser.add_argument("primary_description", metavar="PRIMARY_DESCRIPTION",
|
||||||
help="JSON system description file for the standalone or master node")
|
help="JSON system description file for the primary (standalone or master) node")
|
||||||
parser.add_argument("-o", "--output",
|
parser.add_argument("-o", "--output",
|
||||||
help="output file, defaults to standard output if omitted")
|
help="output file, defaults to standard output if omitted")
|
||||||
parser.add_argument("-s", "--satellite", nargs=2, action="append",
|
parser.add_argument("-s", "--satellite", nargs=2, action="append",
|
||||||
|
@ -658,7 +660,7 @@ def main():
|
||||||
|
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
|
|
||||||
master_description = jsondesc.load(args.master_description)
|
primary_description = jsondesc.load(args.primary_description)
|
||||||
|
|
||||||
satellites = []
|
satellites = []
|
||||||
for destination, description_path in args.satellite:
|
for destination, description_path in args.satellite:
|
||||||
|
@ -667,9 +669,9 @@ def main():
|
||||||
|
|
||||||
if args.output is not None:
|
if args.output is not None:
|
||||||
with open(args.output, "w") as f:
|
with open(args.output, "w") as f:
|
||||||
process(f, master_description, satellites)
|
process(f, primary_description, satellites)
|
||||||
else:
|
else:
|
||||||
process(sys.stdout, master_description, satellites)
|
process(sys.stdout, primary_description, satellites)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
|
|
@ -65,14 +65,6 @@ def get_argparser():
|
||||||
return parser
|
return parser
|
||||||
|
|
||||||
|
|
||||||
class MasterConfig:
|
|
||||||
def __init__(self, name):
|
|
||||||
self.name = name
|
|
||||||
|
|
||||||
def get_name(self):
|
|
||||||
return self.name
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
args = get_argparser().parse_args()
|
args = get_argparser().parse_args()
|
||||||
log_forwarder = init_log(args)
|
log_forwarder = init_log(args)
|
||||||
|
@ -118,8 +110,6 @@ def main():
|
||||||
scheduler.start(loop=loop)
|
scheduler.start(loop=loop)
|
||||||
atexit_register_coroutine(scheduler.stop, loop=loop)
|
atexit_register_coroutine(scheduler.stop, loop=loop)
|
||||||
|
|
||||||
config = MasterConfig(args.name)
|
|
||||||
|
|
||||||
worker_handlers.update({
|
worker_handlers.update({
|
||||||
"get_device_db": device_db.get_device_db,
|
"get_device_db": device_db.get_device_db,
|
||||||
"get_device": device_db.get,
|
"get_device": device_db.get,
|
||||||
|
@ -136,15 +126,17 @@ def main():
|
||||||
experiment_db.scan_repository_async(loop=loop)
|
experiment_db.scan_repository_async(loop=loop)
|
||||||
|
|
||||||
signal_handler_task = loop.create_task(signal_handler.wait_terminate())
|
signal_handler_task = loop.create_task(signal_handler.wait_terminate())
|
||||||
master_terminate = SimpleNamespace(terminate=lambda: signal_handler_task.cancel())
|
master_management = SimpleNamespace(
|
||||||
|
get_name=lambda: args.name,
|
||||||
|
terminate=lambda: signal_handler_task.cancel()
|
||||||
|
)
|
||||||
|
|
||||||
server_control = RPCServer({
|
server_control = RPCServer({
|
||||||
"master_config": config,
|
"master_management": master_management,
|
||||||
"master_device_db": device_db,
|
"master_device_db": device_db,
|
||||||
"master_dataset_db": dataset_db,
|
"master_dataset_db": dataset_db,
|
||||||
"master_schedule": scheduler,
|
"master_schedule": scheduler,
|
||||||
"master_experiment_db": experiment_db,
|
"master_experiment_db": experiment_db,
|
||||||
"master_terminate": master_terminate
|
|
||||||
}, allow_parallel=True)
|
}, allow_parallel=True)
|
||||||
loop.run_until_complete(server_control.start(
|
loop.run_until_complete(server_control.start(
|
||||||
bind, args.port_control))
|
bind, args.port_control))
|
||||||
|
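Note: with the ``master_config`` and ``master_terminate`` targets merged into ``master_management`` above, one control target now covers both queries and shutdown. A minimal sketch, assuming a master reachable on localhost with ``--port-control`` left at its default of 3251:

::

    from sipyco.pc_rpc import Client

    # assumption: master running locally on the default control port
    management = Client("::1", 3251, "master_management")
    try:
        print(management.get_name())   # previously served by the "master_config" target
        # management.terminate()       # previously served by the "master_terminate" target
    finally:
        management.close_rpc()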
@@ -36,6 +36,7 @@ class AMPSoC:
         csrs = getattr(self, name).get_csrs()
         csr_bus = wishbone.Interface(data_width=32, adr_width=32-log2_int(self.csr_separation))
         bank = wishbone.CSRBank(csrs, bus=csr_bus)
+        self.config["kernel_has_"+name] = None
         self.submodules += bank
         self.kernel_cpu.add_wb_slave(self.mem_map[name], self.csr_separation*2**bank.decode_bits, bank.bus)
         self.add_csr_region(name,

@@ -74,6 +74,8 @@ class GTX_20X(Module):
                 p_CPLL_REFCLK_DIV=1,
                 p_RXOUT_DIV=2,
                 p_TXOUT_DIV=2,
+                p_CPLL_INIT_CFG=0x00001E,
+                p_CPLL_LOCK_CFG=0x01C0,
                 i_CPLLRESET=cpllreset,
                 i_CPLLPD=cpllreset,
                 o_CPLLLOCK=cplllock,
@@ -288,9 +290,9 @@ class GTX(Module, TransceiverInterface):
                 i_I=clock_pads.p,
                 i_IB=clock_pads.n,
                 o_O=refclk,
-                p_CLKCM_CFG="0b1",
-                p_CLKRCV_TRST="0b1",
-                p_CLKSWING_CFG="0b11"
+                p_CLKCM_CFG="TRUE",
+                p_CLKRCV_TRST="TRUE",
+                p_CLKSWING_CFG=3
             )

         channel_interfaces = []
@@ -320,7 +322,7 @@ class GTX(Module, TransceiverInterface):

         # stable_clkin resets after reboot since it's in SYS domain
         # still need to keep clk_enable high after this
-        self.sync.bootstrap += clk_enable.eq(self.stable_clkin.storage | self.gtxs[0].tx_init.done)
+        self.sync.bootstrap += clk_enable.eq(self.stable_clkin.storage | self.gtxs[0].tx_init.cplllock)

         # Connect slave i's `rtio_rx` clock to `rtio_rxi` clock
         for i in range(nchannels):

@@ -110,9 +110,9 @@ class GTXInit(Module):

         startup_fsm.act("INITIAL",
             startup_timer.wait.eq(1),
-            If(startup_timer.done & self.stable_clkin, NextState("RESET_ALL"))
+            If(startup_timer.done & self.stable_clkin, NextState("RESET_PLL"))
         )
-        startup_fsm.act("RESET_ALL",
+        startup_fsm.act("RESET_PLL",
             gtXxreset.eq(1),
             self.cpllreset.eq(1),
             pll_reset_timer.wait.eq(1),
@@ -120,19 +120,24 @@ class GTXInit(Module):
         )
         startup_fsm.act("RELEASE_PLL_RESET",
             gtXxreset.eq(1),
-            If(cplllock, NextState("RELEASE_GTH_RESET"))
+            If(cplllock, NextState("RESET_GTX"))
+        )
+        startup_fsm.act("RESET_GTX",
+            gtXxreset.eq(1),
+            pll_reset_timer.wait.eq(1),
+            If(pll_reset_timer.done, NextState("RELEASE_GTX_RESET"))
         )
         # Release GTX reset and wait for GTX resetdone
         # (from UG476, GTX is reset on falling edge
         # of gttxreset)
         if rx:
-            startup_fsm.act("RELEASE_GTH_RESET",
+            startup_fsm.act("RELEASE_GTX_RESET",
                 Xxuserrdy.eq(1),
                 cdr_stable_timer.wait.eq(1),
                 If(Xxresetdone & cdr_stable_timer.done, NextState("DELAY_ALIGN"))
             )
         else:
-            startup_fsm.act("RELEASE_GTH_RESET",
+            startup_fsm.act("RELEASE_GTX_RESET",
                 Xxuserrdy.eq(1),
                 If(Xxresetdone, NextState("DELAY_ALIGN"))
             )
@@ -229,7 +234,7 @@ class GTXInit(Module):
         startup_fsm.act("READY",
             Xxuserrdy.eq(1),
             self.done.eq(1),
-            If(self.restart, NextState("RESET_ALL"))
+            If(self.restart, NextState("RESET_GTX"))
         )


@@ -259,25 +259,25 @@ class Sampler(_EEM):
         ios = [
             ("sampler{}_adc_spi_p".format(eem), 0,
                 Subsignal("clk", Pins(_eem_pin(eem, 0, "p"))),
-                Subsignal("miso", Pins(_eem_pin(eem, 1, "p"))),
+                Subsignal("miso", Pins(_eem_pin(eem, 1, "p")), Misc("DIFF_TERM=TRUE")),
                 iostandard(eem),
             ),
             ("sampler{}_adc_spi_n".format(eem), 0,
                 Subsignal("clk", Pins(_eem_pin(eem, 0, "n"))),
-                Subsignal("miso", Pins(_eem_pin(eem, 1, "n"))),
+                Subsignal("miso", Pins(_eem_pin(eem, 1, "n")), Misc("DIFF_TERM=TRUE")),
                 iostandard(eem),
             ),
             ("sampler{}_pgia_spi_p".format(eem), 0,
                 Subsignal("clk", Pins(_eem_pin(eem, 4, "p"))),
                 Subsignal("mosi", Pins(_eem_pin(eem, 5, "p"))),
-                Subsignal("miso", Pins(_eem_pin(eem, 6, "p"))),
+                Subsignal("miso", Pins(_eem_pin(eem, 6, "p")), Misc("DIFF_TERM=TRUE")),
                 Subsignal("cs_n", Pins(_eem_pin(eem, 7, "p"))),
                 iostandard(eem),
             ),
             ("sampler{}_pgia_spi_n".format(eem), 0,
                 Subsignal("clk", Pins(_eem_pin(eem, 4, "n"))),
                 Subsignal("mosi", Pins(_eem_pin(eem, 5, "n"))),
-                Subsignal("miso", Pins(_eem_pin(eem, 6, "n"))),
+                Subsignal("miso", Pins(_eem_pin(eem, 6, "n")), Misc("DIFF_TERM=TRUE")),
                 Subsignal("cs_n", Pins(_eem_pin(eem, 7, "n"))),
                 iostandard(eem),
             ),
@@ -586,14 +586,14 @@ class Mirny(_EEM):
             ("mirny{}_spi_p".format(eem), 0,
                 Subsignal("clk", Pins(_eem_pin(eem, 0, "p"))),
                 Subsignal("mosi", Pins(_eem_pin(eem, 1, "p"))),
-                Subsignal("miso", Pins(_eem_pin(eem, 2, "p"))),
+                Subsignal("miso", Pins(_eem_pin(eem, 2, "p")), Misc("DIFF_TERM=TRUE")),
                 Subsignal("cs_n", Pins(_eem_pin(eem, 3, "p"))),
                 iostandard(eem),
             ),
             ("mirny{}_spi_n".format(eem), 0,
                 Subsignal("clk", Pins(_eem_pin(eem, 0, "n"))),
                 Subsignal("mosi", Pins(_eem_pin(eem, 1, "n"))),
-                Subsignal("miso", Pins(_eem_pin(eem, 2, "n"))),
+                Subsignal("miso", Pins(_eem_pin(eem, 2, "n")), Misc("DIFF_TERM=TRUE")),
                 Subsignal("cs_n", Pins(_eem_pin(eem, 3, "n"))),
                 iostandard(eem),
             ),

@@ -31,7 +31,7 @@ def peripheral_dio_spi(module, peripheral, **kwargs):
            for s in peripheral["spi"]]
     ttl = [(t["pin"], ttl_classes[t["direction"]],
             edge_counter.SimpleEdgeCounter if t.get("edge_counter") else None)
-           for t in peripheral.get("ttl", [])]
+           for t in peripheral["ttl"]]
     eem.DIO_SPI.add_std(module, peripheral["ports"][0], spi, ttl, **kwargs)


@@ -117,7 +117,7 @@ def peripheral_phaser(module, peripheral, **kwargs):
     if len(peripheral["ports"]) != 1:
         raise ValueError("wrong number of ports")
     eem.Phaser.add_std(module, peripheral["ports"][0],
-        peripheral.get("mode", "base"), **kwargs)
+        peripheral["mode"], **kwargs)


 def peripheral_hvamp(module, peripheral, **kwargs):
@@ -87,7 +87,10 @@ class StandaloneBase(MiniSoC, AMPSoC):
             Instance("IBUFDS_GTE2",
                 i_CEB=0,
                 i_I=cdr_clk_out.p, i_IB=cdr_clk_out.n,
-                o_O=cdr_clk),
+                o_O=cdr_clk,
+                p_CLKCM_CFG="TRUE",
+                p_CLKRCV_TRST="TRUE",
+                p_CLKSWING_CFG=3),
             Instance("BUFG", i_I=cdr_clk, o_O=cdr_clk_buf)
         ]

@@ -378,7 +381,10 @@ class MasterBase(MiniSoC, AMPSoC):
         self.specials += Instance("IBUFDS_GTE2",
             i_CEB=0,
             i_I=cdr_clk_out.p, i_IB=cdr_clk_out.n,
-            o_O=cdr_clk)
+            o_O=cdr_clk,
+            p_CLKCM_CFG="TRUE",
+            p_CLKRCV_TRST="TRUE",
+            p_CLKSWING_CFG=3)
         # Note precisely the rules Xilinx made up:
         # refclksel=0b001 GTREFCLK0 selected
         # refclksel=0b010 GTREFCLK1 selected
@@ -400,9 +406,11 @@ class MasterBase(MiniSoC, AMPSoC):
         self.drtio_qpll_channel, self.ethphy_qpll_channel = qpll.channels


-class SatelliteBase(BaseSoC):
+class SatelliteBase(BaseSoC, AMPSoC):
     mem_map = {
+        "rtio":     0x20000000,
         "drtioaux": 0x50000000,
+        "mailbox":  0x70000000
     }
     mem_map.update(BaseSoC.mem_map)

@@ -411,6 +419,7 @@ class SatelliteBase(BaseSoC):
             cpu_bus_width = 32
         else:
             cpu_bus_width = 64

         BaseSoC.__init__(self,
             cpu_type="vexriscv",
             hw_rev=hw_rev,
@@ -420,6 +429,7 @@ class SatelliteBase(BaseSoC):
             clk_freq=rtio_clk_freq,
             rtio_sys_merge=True,
             **kwargs)
+        AMPSoC.__init__(self)
         add_identifier(self, gateware_identifier_str=gateware_identifier_str)

         platform = self.platform
@@ -440,7 +450,10 @@ class SatelliteBase(BaseSoC):
         self.specials += Instance("IBUFDS_GTE2",
             i_CEB=0,
             i_I=cdr_clk_out.p, i_IB=cdr_clk_out.n,
-            o_O=cdr_clk)
+            o_O=cdr_clk,
+            p_CLKCM_CFG="TRUE",
+            p_CLKRCV_TRST="TRUE",
+            p_CLKSWING_CFG=3)
         qpll_drtio_settings = QPLLSettings(
             refclksel=0b001,
             fbdiv=4,
@@ -565,12 +578,18 @@ class SatelliteBase(BaseSoC):
         self.submodules.rtio_moninj = rtio.MonInj(rtio_channels)
         self.csr_devices.append("rtio_moninj")

+        # satellite (master-controlled) RTIO
         self.submodules.local_io = SyncRTIO(self.rtio_tsc, rtio_channels, lane_count=sed_lanes)
         self.comb += self.drtiosat.async_errors.eq(self.local_io.async_errors)

+        # subkernel RTIO
+        self.submodules.rtio = rtio.KernelInitiator(self.rtio_tsc)
+        self.register_kernel_cpu_csrdevice("rtio")

         self.submodules.rtio_dma = rtio.DMA(self.get_native_sdram_if(), self.cpu_dw)
         self.csr_devices.append("rtio_dma")
         self.submodules.cri_con = rtio.CRIInterconnectShared(
-            [self.drtiosat.cri, self.rtio_dma.cri],
+            [self.drtiosat.cri, self.rtio_dma.cri, self.rtio.cri],
             [self.local_io.cri] + self.drtio_cri,
             enable_routing=True)
         self.csr_devices.append("cri_con")

@@ -22,7 +22,7 @@ class GenericStandalone(StandaloneBase):
         hw_rev = description["hw_rev"]
         self.class_name_override = description["variant"]
         StandaloneBase.__init__(self, hw_rev=hw_rev, **kwargs)
+        self.config["DRTIO_ROLE"] = description["drtio_role"]
         self.config["RTIO_FREQUENCY"] = "{:.1f}".format(description["rtio_frequency"]/1e6)
         if "ext_ref_frequency" in description:
             self.config["SI5324_EXT_REF"] = None
@@ -76,6 +76,7 @@ class GenericMaster(MasterBase):
             rtio_clk_freq=description["rtio_frequency"],
             enable_sata=description["enable_sata_drtio"],
             **kwargs)
+        self.config["DRTIO_ROLE"] = description["drtio_role"]
         if "ext_ref_frequency" in description:
             self.config["SI5324_EXT_REF"] = None
             self.config["EXT_REF_FREQUENCY"] = "{:.1f}".format(
@@ -113,6 +114,7 @@ class GenericSatellite(SatelliteBase):
             rtio_clk_freq=description["rtio_frequency"],
             enable_sata=description["enable_sata_drtio"],
             **kwargs)
+        self.config["DRTIO_ROLE"] = description["drtio_role"]
         if hw_rev == "v1.0":
             # EEM clock fan-out from Si5324, not MMCX
             self.comb += self.platform.request("clk_sel").eq(1)
@@ -149,7 +151,7 @@ def main():
     args = parser.parse_args()
     description = jsondesc.load(args.description)

-    min_artiq_version = description.get("min_artiq_version", "0")
+    min_artiq_version = description["min_artiq_version"]
     if Version(artiq_version) < Version(min_artiq_version):
         logger.warning("ARTIQ version mismatch: current %s < %s minimum",
                        artiq_version, min_artiq_version)
@@ -157,14 +159,14 @@ def main():
     if description["target"] != "kasli":
         raise ValueError("Description is for a different target")

-    if description["base"] == "standalone":
+    if description["drtio_role"] == "standalone":
         cls = GenericStandalone
-    elif description["base"] == "master":
+    elif description["drtio_role"] == "master":
         cls = GenericMaster
-    elif description["base"] == "satellite":
+    elif description["drtio_role"] == "satellite":
         cls = GenericSatellite
     else:
-        raise ValueError("Invalid base")
+        raise ValueError("Invalid DRTIO role")

     soc = cls(description, gateware_identifier_str=args.gateware_identifier_str, **soc_kasli_argdict(args))
     args.variant = description["variant"]

@@ -98,6 +98,8 @@ class _StandaloneBase(MiniSoC, AMPSoC):
         AMPSoC.__init__(self)
         add_identifier(self, gateware_identifier_str=gateware_identifier_str)

+        self.config["DRTIO_ROLE"] = "standalone"
+
         if isinstance(self.platform.toolchain, XilinxVivadoToolchain):
             self.platform.toolchain.bitstream_commands.extend([
                 "set_property BITSTREAM.GENERAL.COMPRESS True [current_design]",
@@ -119,9 +121,9 @@ class _StandaloneBase(MiniSoC, AMPSoC):
                 i_CEB=0,
                 i_I=cdr_clk_out.p, i_IB=cdr_clk_out.n,
                 o_O=cdr_clk,
-                p_CLKCM_CFG=1,
-                p_CLKRCV_TRST=1,
-                p_CLKSWING_CFG="2'b11"),
+                p_CLKCM_CFG="TRUE",
+                p_CLKRCV_TRST="TRUE",
+                p_CLKSWING_CFG=3),
             Instance("BUFG", i_I=cdr_clk, o_O=cdr_clk_buf)
         ]

@@ -194,6 +196,8 @@ class _MasterBase(MiniSoC, AMPSoC):
         AMPSoC.__init__(self)
         add_identifier(self, gateware_identifier_str=gateware_identifier_str)

+        self.config["DRTIO_ROLE"] = "master"
+
         if isinstance(self.platform.toolchain, XilinxVivadoToolchain):
             self.platform.toolchain.bitstream_commands.extend([
                 "set_property BITSTREAM.GENERAL.COMPRESS True [current_design]",
@@ -314,9 +318,11 @@ class _MasterBase(MiniSoC, AMPSoC):


-class _SatelliteBase(BaseSoC):
+class _SatelliteBase(BaseSoC, AMPSoC):
     mem_map = {
+        "rtio":     0x20000000,
         "drtioaux": 0x50000000,
+        "mailbox":  0x70000000
     }
     mem_map.update(BaseSoC.mem_map)

@@ -331,8 +337,11 @@ class _SatelliteBase(BaseSoC):
             clk_freq=clk_freq,
             rtio_sys_merge=True,
             **kwargs)
+        AMPSoC.__init__(self)
         add_identifier(self, gateware_identifier_str=gateware_identifier_str)

+        self.config["DRTIO_ROLE"] = "satellite"
+
         if isinstance(self.platform.toolchain, XilinxVivadoToolchain):
             self.platform.toolchain.bitstream_commands.extend([
                 "set_property BITSTREAM.GENERAL.COMPRESS True [current_design]",
@@ -453,12 +462,18 @@ class _SatelliteBase(BaseSoC):
         self.submodules.rtio_moninj = rtio.MonInj(rtio_channels)
         self.csr_devices.append("rtio_moninj")

+        # DRTIO
         self.submodules.local_io = SyncRTIO(self.rtio_tsc, rtio_channels)
         self.comb += self.drtiosat.async_errors.eq(self.local_io.async_errors)

+        # subkernel RTIO
+        self.submodules.rtio = rtio.KernelInitiator(self.rtio_tsc)
+        self.register_kernel_cpu_csrdevice("rtio")

         self.submodules.rtio_dma = rtio.DMA(self.get_native_sdram_if(), self.cpu_dw)
         self.csr_devices.append("rtio_dma")
         self.submodules.cri_con = rtio.CRIInterconnectShared(
-            [self.drtiosat.cri, self.rtio_dma.cri],
+            [self.drtiosat.cri, self.rtio_dma.cri, self.rtio.cri],
             [self.local_io.cri] + self.drtio_cri,
             enable_routing=True)
         self.csr_devices.append("cri_con")
@@ -21,9 +21,10 @@ logger = logging.getLogger(__name__)


 class AppletIPCServer(AsyncioParentComm):
-    def __init__(self, datasets_sub):
+    def __init__(self, dataset_sub, dataset_ctl):
         AsyncioParentComm.__init__(self)
-        self.datasets_sub = datasets_sub
+        self.dataset_sub = dataset_sub
+        self.dataset_ctl = dataset_ctl
         self.datasets = set()
         self.dataset_prefixes = []

@@ -60,7 +61,7 @@ class AppletIPCServer(AsyncioParentComm):
         self.write_pyon({"action": "mod", "mod": mod})

     async def serve(self, embed_cb, fix_initial_size_cb):
-        self.datasets_sub.notify_cbs.append(self._on_mod)
+        self.dataset_sub.notify_cbs.append(self._on_mod)
         try:
             while True:
                 obj = await self.read_pyon()
@@ -74,10 +75,14 @@ class AppletIPCServer(AsyncioParentComm):
                 elif action == "subscribe":
                     self.datasets = obj["datasets"]
                     self.dataset_prefixes = obj["dataset_prefixes"]
-                    if self.datasets_sub.model is not None:
+                    if self.dataset_sub.model is not None:
                         mod = self._synthesize_init(
-                            self.datasets_sub.model.backing_store)
+                            self.dataset_sub.model.backing_store)
                         self.write_pyon({"action": "mod", "mod": mod})
+                elif action == "set_dataset":
+                    await self.dataset_ctl.set(obj["key"], obj["value"], metadata=obj["metadata"], persist=obj["persist"])
+                elif action == "update_dataset":
+                    await self.dataset_ctl.update(obj["mod"])
                 else:
                     raise ValueError("unknown action in applet message")
         except:
@@ -90,7 +95,7 @@ class AppletIPCServer(AsyncioParentComm):
             logger.error("error processing data from applet, "
                          "server stopped", exc_info=True)
         finally:
-            self.datasets_sub.notify_cbs.remove(self._on_mod)
+            self.dataset_sub.notify_cbs.remove(self._on_mod)

     def start_server(self, embed_cb, fix_initial_size_cb, *, loop=None):
         self.server_task = asyncio.ensure_future(
@@ -103,7 +108,7 @@ class AppletIPCServer(AsyncioParentComm):


 class _AppletDock(QDockWidgetCloseDetect):
-    def __init__(self, datasets_sub, uid, name, spec, extra_substitutes):
+    def __init__(self, dataset_sub, dataset_ctl, uid, name, spec, extra_substitutes):
         QDockWidgetCloseDetect.__init__(self, "Applet: " + name)
         self.setObjectName("applet" + str(uid))

@@ -111,7 +116,8 @@ class _AppletDock(QDockWidgetCloseDetect):
         self.setMinimumSize(20*qfm.averageCharWidth(), 5*qfm.lineSpacing())
         self.resize(40*qfm.averageCharWidth(), 10*qfm.lineSpacing())

-        self.datasets_sub = datasets_sub
+        self.dataset_sub = dataset_sub
+        self.dataset_ctl = dataset_ctl
         self.applet_name = name
         self.spec = spec
         self.extra_substitutes = extra_substitutes
@@ -130,7 +136,7 @@ class _AppletDock(QDockWidgetCloseDetect):
             return
         self.starting_stopping = True
         try:
-            self.ipc = AppletIPCServer(self.datasets_sub)
+            self.ipc = AppletIPCServer(self.dataset_sub, self.dataset_ctl)
             env = os.environ.copy()
             env["PYTHONUNBUFFERED"] = "1"
             env["ARTIQ_APPLET_EMBED"] = self.ipc.get_address()
@@ -327,7 +333,7 @@ class _CompleterDelegate(QtWidgets.QStyledItemDelegate):


 class AppletsDock(QtWidgets.QDockWidget):
-    def __init__(self, main_window, datasets_sub, extra_substitutes={}, *, loop=None):
+    def __init__(self, main_window, dataset_sub, dataset_ctl, extra_substitutes={}, *, loop=None):
         """
         :param extra_substitutes: Map of extra ``${strings}`` to substitute in applet
             commands to their respective values.
@@ -338,7 +344,8 @@ class AppletsDock(QtWidgets.QDockWidget):
                          QtWidgets.QDockWidget.DockWidgetFloatable)

         self.main_window = main_window
-        self.datasets_sub = datasets_sub
+        self.dataset_sub = dataset_sub
+        self.dataset_ctl = dataset_ctl
         self.extra_substitutes = extra_substitutes
         self.applet_uids = set()

@@ -364,7 +371,7 @@ class AppletsDock(QtWidgets.QDockWidget):

         completer_delegate = _CompleterDelegate()
         self.table.setItemDelegateForColumn(1, completer_delegate)
-        datasets_sub.add_setmodel_callback(completer_delegate.set_model)
+        dataset_sub.add_setmodel_callback(completer_delegate.set_model)

         self.table.setContextMenuPolicy(QtCore.Qt.ActionsContextMenu)
         new_action = QtWidgets.QAction("New applet", self.table)
@@ -440,7 +447,7 @@ class AppletsDock(QtWidgets.QDockWidget):
         self.table.itemChanged.connect(self.item_changed)

     def create(self, item, name, spec):
-        dock = _AppletDock(self.datasets_sub, item.applet_uid, name, spec, self.extra_substitutes)
+        dock = _AppletDock(self.dataset_sub, self.dataset_ctl, item.applet_uid, name, spec, self.extra_substitutes)
         self.main_window.addDockWidget(QtCore.Qt.RightDockWidgetArea, dock)
         dock.setFloating(True)
         asyncio.ensure_future(dock.start(), loop=self._loop)
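The ``set_dataset``/``update_dataset`` actions handled by ``AppletIPCServer`` above are what let applet widgets write datasets back to the master. A hedged sketch of a widget built around the new ``ctl`` constructor argument follows; the ``ctl.set_dataset`` helper name is an assumption about the applet-side API (the authoritative example is the ``big_number.py`` applet), while the ``data_changed`` signature matches the new form:

::

    from PyQt5 import QtWidgets
    from artiq.applets.simple import SimpleApplet


    class WritableNumber(QtWidgets.QLCDNumber):
        def __init__(self, args, ctl):
            QtWidgets.QLCDNumber.__init__(self)
            self.dataset_name = args.dataset
            self.ctl = ctl  # control object supplied by SimpleApplet

        def data_changed(self, value, metadata, persist, mods):
            self.display(value[self.dataset_name])

        def zero(self):
            # assumed helper: forwards a "set_dataset" message to the master
            self.ctl.set_dataset(self.dataset_name, 0, persist=True)


    def main():
        applet = SimpleApplet(WritableNumber)
        applet.add_dataset("dataset", "dataset to display")
        applet.run()

    if __name__ == "__main__":
        main()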
@@ -337,13 +337,21 @@ class HasEnvironment:
             self.kernel_invariants = kernel_invariants | {key}

     @rpc(flags={"async"})
-    def set_dataset(self, key, value,
+    def set_dataset(self, key, value, *,
+                    unit=None, scale=None, precision=None,
                     broadcast=False, persist=False, archive=True):
         """Sets the contents and handling modes of a dataset.

         Datasets must be scalars (``bool``, ``int``, ``float`` or NumPy scalar)
         or NumPy arrays.

+        :param unit: A string representing the unit of the value.
+        :param scale: A numerical factor that is used to adjust the value of
+            the dataset to match the scale or units of the experiment's
+            reference frame when the value is displayed.
+        :param precision: The maximum number of digits to print after the
+            decimal point. Set ``precision=None`` to print as many digits as
+            necessary to uniquely specify the value. Uses IEEE unbiased rounding.
         :param broadcast: the data is sent in real-time to the master, which
             dispatches it.
         :param persist: the master should store the data on-disk. Implies
@@ -351,7 +359,14 @@ class HasEnvironment:
         :param archive: the data is saved into the local storage of the current
             run (archived as a HDF5 file).
         """
-        self.__dataset_mgr.set(key, value, broadcast, persist, archive)
+        metadata = {}
+        if unit is not None:
+            metadata["unit"] = unit
+        if scale is not None:
+            metadata["scale"] = scale
+        if precision is not None:
+            metadata["precision"] = precision
+        self.__dataset_mgr.set(key, value, metadata, broadcast, persist, archive)

     @rpc(flags={"async"})
     def mutate_dataset(self, key, index, value):
@@ -405,6 +420,24 @@ class HasEnvironment:
         else:
             return default

+    def get_dataset_metadata(self, key, default=NoDefault):
+        """Returns the metadata of a dataset.
+
+        Returns dictionary with items describing the dataset, including the units,
+        scale and precision.
+
+        This function is used to get additional information for displaying the dataset.
+
+        See ``set_dataset`` for documentation of metadata items.
+        """
+        try:
+            return self.__dataset_mgr.get_metadata(key)
+        except KeyError:
+            if default is NoDefault:
+                raise
+            else:
+                return default

     def setattr_dataset(self, key, default=NoDefault, archive=True):
         """Sets the contents of a dataset as attribute. The names of the
         dataset and of the attribute are the same."""
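Taken together, the new keyword arguments and ``get_dataset_metadata`` can be exercised from an experiment as in this minimal sketch (``kV`` is assumed to be available from ``artiq.language.units``; the dataset key is arbitrary):

::

    from artiq.experiment import EnvExperiment
    from artiq.language.units import kV


    class SetWithMetadata(EnvExperiment):
        def build(self):
            pass

        def run(self):
            self.set_dataset("demo.voltage", 3.5*kV,
                             unit="kV", scale=kV, precision=2,
                             broadcast=True, persist=True)
            # expected: {'unit': 'kV', 'scale': 1000.0, 'precision': 2}
            print(self.get_dataset_metadata("demo.voltage"))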
@@ -45,8 +45,9 @@ class DatasetDB(TaskObject):
         self.lmdb = lmdb.open(persist_file, subdir=False, map_size=2**30)
         data = dict()
         with self.lmdb.begin() as txn:
-            for key, value in txn.cursor():
-                data[key.decode()] = (True, pyon.decode(value.decode()))
+            for key, value_and_metadata in txn.cursor():
+                value, metadata = pyon.decode(value_and_metadata.decode())
+                data[key.decode()] = (True, value, metadata)
         self.data = Notifier(data)
         self.pending_keys = set()

@@ -59,7 +60,10 @@ class DatasetDB(TaskObject):
                 if key not in self.data.raw_view or not self.data.raw_view[key][0]:
                     txn.delete(key.encode())
                 else:
-                    txn.put(key.encode(), pyon.encode(self.data.raw_view[key][1]).encode())
+                    value_and_metadata = (self.data.raw_view[key][1],
+                                          self.data.raw_view[key][2])
+                    txn.put(key.encode(),
+                            pyon.encode(value_and_metadata).encode())
         self.pending_keys.clear()

     async def _do(self):
@@ -73,6 +77,9 @@ class DatasetDB(TaskObject):
     def get(self, key):
         return self.data.raw_view[key][1]

+    def get_metadata(self, key):
+        return self.data.raw_view[key][2]

     def update(self, mod):
         if mod["path"]:
             key = mod["path"][0]
@@ -83,13 +90,18 @@ class DatasetDB(TaskObject):
         process_mod(self.data, mod)

     # convenience functions (update() can be used instead)
-    def set(self, key, value, persist=None):
+    def set(self, key, value, persist=None, metadata=None):
         if persist is None:
             if key in self.data.raw_view:
                 persist = self.data.raw_view[key][0]
             else:
                 persist = False
-        self.data[key] = (persist, value)
+        if metadata is None:
+            if key in self.data.raw_view:
+                metadata = self.data.raw_view[key][2]
+            else:
+                metadata = {}
+        self.data[key] = (persist, value, metadata)
         self.pending_keys.add(key)

     def delete(self, key):
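Since ``DatasetDB`` is exposed over RPC as ``master_dataset_db`` (see the ``RPCServer`` targets earlier in this diff), the widened ``set()`` signature can also be driven from a plain sipyco client. Host and port below are assumptions (default control port 3251):

::

    from sipyco.pc_rpc import Client

    dataset_db = Client("::1", 3251, "master_dataset_db")
    try:
        dataset_db.set("demo.offset", 1.5e-6, persist=True,
                       metadata={"unit": "us", "scale": 1e-6, "precision": 3})
        print(dataset_db.get("demo.offset"))           # raw value: 1.5e-06
        print(dataset_db.get_metadata("demo.offset"))  # {'unit': 'us', 'scale': 1e-06, 'precision': 3}
    finally:
        dataset_db.close_rpc()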
@@ -111,11 +111,12 @@ class DatasetManager:
         self._broadcaster = Notifier(dict())
         self.local = dict()
         self.archive = dict()
+        self.metadata = dict()

         self.ddb = ddb
         self._broadcaster.publish = ddb.update

-    def set(self, key, value, broadcast=False, persist=False, archive=True):
+    def set(self, key, value, metadata, broadcast, persist, archive):
         if persist:
             broadcast = True

@@ -123,7 +124,7 @@ class DatasetManager:
             logger.warning(f"Dataset '{key}' will not be stored. Both 'broadcast' and 'archive' are set to False.")

         if broadcast:
-            self._broadcaster[key] = persist, value
+            self._broadcaster[key] = persist, value, metadata
         elif key in self._broadcaster.raw_view:
             del self._broadcaster[key]

@@ -132,6 +133,8 @@ class DatasetManager:
         elif key in self.local:
             del self.local[key]

+        self.metadata[key] = metadata
+
     def _get_mutation_target(self, key):
         target = self.local.get(key, None)
         if key in self._broadcaster.raw_view:
@@ -166,21 +169,30 @@ class DatasetManager:
             self.archive[key] = data
         return data

+    def get_metadata(self, key):
+        if key in self.metadata:
+            return self.metadata[key]
+        return self.ddb.get_metadata(key)

     def write_hdf5(self, f):
         datasets_group = f.create_group("datasets")
         for k, v in self.local.items():
-            _write(datasets_group, k, v)
+            m = self.metadata.get(k, {})
+            _write(datasets_group, k, v, m)

         archive_group = f.create_group("archive")
         for k, v in self.archive.items():
-            _write(archive_group, k, v)
+            m = self.metadata.get(k, {})
+            _write(archive_group, k, v, m)


-def _write(group, k, v):
+def _write(group, k, v, m):
     # Add context to exception message when the user writes a dataset that is
     # not representable in HDF5.
     try:
         group[k] = v
+        for key, val in m.items():
+            group[k].attrs[key] = val
     except TypeError as e:
         raise TypeError("Error writing dataset '{}' of type '{}': {}".format(
             k, type(v), e))
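Because ``_write()`` now attaches each metadata item as an HDF5 attribute, metadata can be recovered from a results file afterwards. A sketch using h5py, with a placeholder file name and dataset key:

::

    import h5py

    # placeholder file name and dataset key
    with h5py.File("000000001-SetWithMetadata.h5", "r") as f:
        dset = f["datasets/demo.voltage"]
        print(dset[()])           # raw, unscaled value
        print(dict(dset.attrs))   # e.g. {'precision': 2, 'scale': 1000.0, 'unit': 'kV'}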
@@ -16,6 +16,9 @@ class MockDatasetDB:
     def get(self, key):
         return self.data[key][1]

+    def get_metadata(self, key):
+        return self.data[key][2]

     def update(self, mod):
         # Copy mod before applying to avoid sharing references to objects
         # between this and the DatasetManager, which would lead to mods being
@@ -30,6 +33,9 @@ class TestExperiment(EnvExperiment):
     def get(self, key):
         return self.get_dataset(key)

+    def get_metadata(self, key):
+        return self.get_dataset_metadata(key)

     def set(self, key, value, **kwargs):
         self.set_dataset(key, value, **kwargs)

@@ -82,9 +88,9 @@ class ExperimentDatasetCase(unittest.TestCase):
     def test_append_broadcast(self):
         self.exp.set(KEY, [], broadcast=True)
         self.exp.append(KEY, 0)
-        self.assertEqual(self.dataset_db.data[KEY][1], [0])
+        self.assertEqual(self.dataset_db.get(KEY), [0])
         self.exp.append(KEY, 1)
-        self.assertEqual(self.dataset_db.data[KEY][1], [0, 1])
+        self.assertEqual(self.dataset_db.get(KEY), [0, 1])

     def test_append_array(self):
         for broadcast in (True, False):
@@ -103,3 +109,26 @@ class ExperimentDatasetCase(unittest.TestCase):
         with self.assertRaises(KeyError):
             self.exp.append(KEY, 0)

+    def test_set_dataset_metadata(self):
+        self.exp.set(KEY, 0, unit="kV", precision=2)
+        md = {"unit": "kV", "precision": 2}
+        self.assertEqual(self.exp.get_metadata(KEY), md)
+
+    def test_metadata_default(self):
+        self.exp.set(KEY, 0)
+        self.assertEqual(self.exp.get_metadata(KEY), {})
+
+    def test_metadata_scale(self):
+        self.exp.set(KEY, 0, scale=1000)
+        self.assertEqual(self.exp.get_metadata(KEY), {"scale": 1000})
+
+    def test_metadata_broadcast(self):
+        self.exp.set(KEY, 0, unit="kV", precision=2, broadcast=True)
+        md = {"unit": "kV", "precision": 2}
+        self.assertEqual(self.dataset_db.get_metadata(KEY), md)
+
+    def test_metadata_broadcast_default(self):
+        self.exp.set(KEY, 0, broadcast=True)
+        self.assertEqual(self.dataset_db.get_metadata(KEY), {})


@@ -288,7 +288,7 @@ class SchedulerCase(unittest.TestCase):
             self.assertEqual(
                 mod,
                 {"action": "setitem", "key": "termination_ok",
-                 "value": (False, True), "path": []})
+                 "value": (False, True, {}), "path": []})
             termination_ok = True
         handlers = {
             "update_dataset": check_termination

@@ -15,6 +15,7 @@ from sipyco import pyon
 from artiq import __version__ as artiq_version
 from artiq.appdirs import user_config_dir
 from artiq.language.environment import is_public_experiment
+from artiq.language import units


 __all__ = ["parse_arguments", "elide", "short_format", "file_import",
@@ -54,20 +55,40 @@ def elide(s, maxlen):
     return s


-def short_format(v):
+def short_format(v, metadata={}):
+    m = metadata
+    unit = m.get("unit", "")
+    default_scale = getattr(units, unit, 1)
+    scale = m.get("scale", default_scale)
+    precision = m.get("precision", None)
     if v is None:
         return "None"
     t = type(v)
-    if np.issubdtype(t, np.number) or np.issubdtype(t, np.bool_):
+    if np.issubdtype(t, np.number):
+        v_t = np.divide(v, scale)
+        v_str = np.format_float_positional(v_t,
+                                           precision=precision,
+                                           unique=True)
+        v_str += " " + unit if unit else ""
+        return v_str
+    elif np.issubdtype(t, np.bool_):
         return str(v)
     elif np.issubdtype(t, np.unicode_):
         return "\"" + elide(v, 50) + "\""
-    else:
-        r = t.__name__
-        if t is list or t is dict or t is set:
-            r += " ({})".format(len(v))
-        if t is np.ndarray:
-            r += " " + str(np.shape(v))
+    elif t is np.ndarray:
+        v_t = np.divide(v, scale)
+        v_str = np.array2string(v_t,
+                                max_line_width=1000,
+                                precision=precision,
+                                suppress_small=True,
+                                separator=', ',
+                                threshold=4,
+                                edgeitems=2,
+                                floatmode='maxprec')
+        v_str += " " + unit if unit else ""
+        return v_str
+    elif isinstance(v, (dict, list)):
+        r = t.__name__ + " ({})".format(len(v))
         return r
|
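
In short, the reworked ``short_format`` divides numeric values by the metadata scale (defaulting to the scale implied by the unit name, looked up in ``artiq.language.units``) and appends the unit. A rough sketch of the expected behaviour, assuming the function is still importable from ``artiq.tools`` (illustrative values, not taken from the test suite):

    from artiq.tools import short_format

    short_format(1500.0, {"unit": "kV", "scale": 1000})  # -> "1.5 kV"
    short_format(3.14159, {"precision": 3})              # -> "3.142"
    short_format([1, 2, 3])                              # -> "list (3)"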
@@ -14,12 +14,9 @@ ARTIQ itself does not depend on Nix, and it is also possible to compile everythi
 * Install the `Nix package manager <http://nixos.org/nix/>`_, version 2.4 or later. Prefer a single-user installation for simplicity.
 * If you did not install Vivado in its default location ``/opt``, clone the ARTIQ Git repository and edit ``flake.nix`` accordingly.
 * Enable flakes in Nix by e.g. adding ``experimental-features = nix-command flakes`` to ``nix.conf`` (for example ``~/.config/nix/nix.conf``).
-* Enter the development shell by running ``nix develop git+https://github.com/m-labs/artiq.git``, or alternatively by cloning the ARTIQ Git repository and running ``nix develop`` at the root (where ``flake.nix`` is).
+* Clone the ARTIQ Git repository and run ``nix develop`` at the root (where ``flake.nix`` is).
+* Make the current source code of ARTIQ available to the Python interpreter by running ``export PYTHONPATH=`pwd`:$PYTHONPATH``.
 * You can then build the firmware and gateware with a command such as ``$ python -m artiq.gateware.targets.kasli``. If you are using a JSON system description file, use ``$ python -m artiq.gateware.targets.kasli_generic file.json``.
 * Flash the binaries into the FPGA board with a command such as ``$ artiq_flash --srcbuild -d artiq_kasli/<your_variant>``. You need to configure OpenOCD as explained :ref:`in the user section <configuring-openocd>`. OpenOCD is already part of the flake's development environment.
 * Check that the board boots and examine the UART messages by running a serial terminal program, e.g. ``$ flterm /dev/ttyUSB1`` (``flterm`` is part of MiSoC and installed in the flake's development environment). Leave the terminal running while you are flashing the board, so that you see the startup messages when the board boots immediately after flashing. You can also restart the board (without reflashing it) with ``$ artiq_flash start``.
 * The communication parameters are 115200 8-N-1. Ensure that your user has access to the serial device (e.g. by adding the user account to the ``dialout`` group).
 
-.. warning::
-    Nix will make a read-only copy of the ARTIQ source to use in the shell environment. Therefore, any modifications that you make to the source after the shell is started will not be taken into account. A solution applicable to ARTIQ (and several other Python packages such as Migen and MiSoC) is to prepend the ARTIQ source directory to the ``PYTHONPATH`` environment variable after entering the shell. If you want this to be done by default, edit the ``devShell`` section of ``flake.nix``.
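
One quick way to confirm that the exported ``PYTHONPATH`` takes precedence over any store copy (a suggested check, not part of the patch): run the interpreter from the repository root inside the development shell and inspect where ARTIQ is imported from; the reported path should point at the checkout rather than ``/nix/store``.

    # Run inside the development shell, from the repository root.
    import artiq
    print(artiq.__file__)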
@@ -51,7 +51,6 @@ Installing multiple packages and making them visible to the ARTIQ commands requi
             #ps.scipy
             #ps.numba
             #ps.matplotlib
-            #ps.jsonschema  # required by artiq_ddb_template
             # or if you need Qt (will recompile):
             #(ps.matplotlib.override { enableQt = true; })
             #ps.bokeh

flake.nix (30 changed lines)
@@ -86,8 +86,8 @@
       nativeBuildInputs = [ pkgs.qt5.wrapQtAppsHook ];
 
       # keep llvm_x in sync with nac3
-      propagatedBuildInputs = [ pkgs.llvm_14 nac3.packages.x86_64-linux.nac3artiq-pgo sipyco.packages.x86_64-linux.sipyco artiq-comtools.packages.x86_64-linux.artiq-comtools ]
-        ++ (with pkgs.python3Packages; [ pyqtgraph pygit2 numpy dateutil scipy prettytable pyserial h5py pyqt5 qasync tqdm lmdb ]);
+      propagatedBuildInputs = [ pkgs.llvm_14 nac3.packages.x86_64-linux.nac3artiq-pgo sipyco.packages.x86_64-linux.sipyco pkgs.qt5.qtsvg artiq-comtools.packages.x86_64-linux.artiq-comtools ]
+        ++ (with pkgs.python3Packages; [ pyqtgraph pygit2 numpy dateutil scipy prettytable pyserial h5py pyqt5 qasync tqdm lmdb jsonschema ]);
 
       dontWrapQtApps = true;
       postFixup = ''

@@ -182,7 +182,7 @@
         };
       };
       nativeBuildInputs = [
-        (pkgs.python3.withPackages(ps: [ ps.jsonschema migen misoc (artiq.withExperimentalFeatures experimentalFeatures) ]))
+        (pkgs.python3.withPackages(ps: [ migen misoc (artiq.withExperimentalFeatures experimentalFeatures) ps.packaging ]))
         rust
         pkgs.cargo-xbuild
         pkgs.llvmPackages_14.clang-unwrapped

@@ -342,10 +342,13 @@
 
       defaultPackage.x86_64-linux = packages.x86_64-linux.python3-mimalloc.withPackages(ps: [ packages.x86_64-linux.artiq ]);
 
-      devShell.x86_64-linux = pkgs.mkShell {
+      # Main development shell with everything you need to develop ARTIQ on Linux.
+      # ARTIQ itself is not included in the environment, you can make Python use the current sources using e.g.
+      # export PYTHONPATH=`pwd`:$PYTHONPATH
+      devShells.x86_64-linux.default = pkgs.mkShell {
         name = "artiq-dev-shell";
         buildInputs = [
-          (packages.x86_64-linux.python3-mimalloc.withPackages(ps: with packages.x86_64-linux; [ migen misoc artiq ps.paramiko ps.jsonschema microscope ]))
+          (packages.x86_64-linux.python3-mimalloc.withPackages(ps: with packages.x86_64-linux; [ migen misoc ps.paramiko microscope ps.packaging ] ++ artiq.propagatedBuildInputs))
           rust
           pkgs.cargo-xbuild
           pkgs.llvmPackages_14.clang-unwrapped
@@ -359,11 +362,26 @@
           pkgs.python3Packages.sphinx-argparse sphinxcontrib-wavedrom latex-artiq-manual
         ];
         shellHook = ''
-          export QT_PLUGIN_PATH=${pkgs.qt5.qtbase}/${pkgs.qt5.qtbase.dev.qtPluginPrefix}
+          export QT_PLUGIN_PATH=${pkgs.qt5.qtbase}/${pkgs.qt5.qtbase.dev.qtPluginPrefix}:${pkgs.qt5.qtsvg.bin}/${pkgs.qt5.qtbase.dev.qtPluginPrefix}
           export QML2_IMPORT_PATH=${pkgs.qt5.qtbase}/${pkgs.qt5.qtbase.dev.qtQmlPrefix}
         '';
       };
+
+      # Lighter development shell optimized for building firmware and flashing boards.
+      devShells.x86_64-linux.boards = pkgs.mkShell {
+        name = "artiq-boards-shell";
+        buildInputs = [
+          (pkgs.python3.withPackages(ps: with packages.x86_64-linux; [ migen misoc artiq ps.packaging ]))
+          rust
+          pkgs.cargo-xbuild
+          pkgs.llvmPackages_11.clang-unwrapped
+          pkgs.llvm_11
+          pkgs.lld_11
+          packages.x86_64-linux.vivado
+          packages.x86_64-linux.openocd-bscanspi
+        ];
+      };
+
       packages.aarch64-linux = {
         openocd-bscanspi = openocd-bscanspi-f pkgs-aarch64;
       };
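
A usage note (not taken from this commit, just standard Nix flakes behaviour): after this change the full development environment is entered with ``nix develop`` (equivalently ``nix develop .#default``) from the repository root, while the lighter firmware-building and flashing environment is selected with ``nix develop .#boards``.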