From d8b1e59538822b3377a30ba21306073f1367f3d7 Mon Sep 17 00:00:00 2001 From: Etienne Wodey Date: Mon, 16 Nov 2020 21:10:56 +0100 Subject: [PATCH 01/59] datasets: allow passing options to HDF5 backend (e.g. compression) This breaks the internal dataset representation used by applets and when saving to disk (``dataset_db.pyon``). See ``test/test_dataset_db.py`` and ``test/test_datasets.py`` for examples. Signed-off-by: Etienne Wodey --- RELEASE_NOTES.rst | 8 +++ artiq/applets/big_number.py | 2 +- artiq/applets/image.py | 2 +- artiq/applets/plot_hist.py | 4 +- artiq/applets/plot_xy.py | 9 ++-- artiq/applets/plot_xy_hist.py | 6 +-- artiq/applets/simple.py | 3 +- artiq/browser/datasets.py | 4 +- artiq/dashboard/datasets.py | 18 +++---- artiq/language/environment.py | 13 ++++- artiq/master/databases.py | 39 +++++++++++--- artiq/master/worker_db.py | 38 +++++++++----- artiq/test/test_dataset_db.py | 99 +++++++++++++++++++++++++++++++++++ artiq/test/test_datasets.py | 43 +++++++++++++-- artiq/test/test_scheduler.py | 10 +++- 15 files changed, 248 insertions(+), 50 deletions(-) create mode 100644 artiq/test/test_dataset_db.py diff --git a/RELEASE_NOTES.rst b/RELEASE_NOTES.rst index 1c668fd3f..bbd2cd113 100644 --- a/RELEASE_NOTES.rst +++ b/RELEASE_NOTES.rst @@ -13,6 +13,10 @@ Highlights: - Improved documentation - Expose the DAC coarse mixer and sif_sync - Exposes upconverter calibration and enabling/disabling of upconverter LO & RF outputs. +* HDF5 options can now be passed when creating datasets with ``set_dataset``. This allows + in particular to use transparent compression filters as follows: + ``set_dataset(name, value, compression="gzip")``. + Breaking changes: * Updated Phaser-Upconverter default frequency 2.875 GHz. The new default uses the target PFD @@ -20,6 +24,9 @@ Breaking changes: * `Phaser.init()` now disables all Kasli-oscillators. This avoids full power RF output being generated for some configurations. 
* Phaser: fixed coarse mixer frequency configuration +* The internal dataset representation was changed to support tracking HDF5 options like e.g. + a compression method. This requires changes to code reading the dataset persistence file + (``dataset_db.pyon``) and to custom applets. ARTIQ-6 @@ -89,6 +96,7 @@ Breaking changes: * Experiment classes with underscore-prefixed names are now ignored when ``artiq_client`` determines which experiment to submit (consistent with ``artiq_run``). + ARTIQ-5 ------- diff --git a/artiq/applets/big_number.py b/artiq/applets/big_number.py index 62348c8cf..a0714b734 100755 --- a/artiq/applets/big_number.py +++ b/artiq/applets/big_number.py @@ -13,7 +13,7 @@ class NumberWidget(QtWidgets.QLCDNumber): def data_changed(self, data, mods): try: - n = float(data[self.dataset_name][1]) + n = float(data[self.dataset_name]["value"]) except (KeyError, ValueError, TypeError): n = "---" self.display(n) diff --git a/artiq/applets/image.py b/artiq/applets/image.py index b7d36c1a1..4bc6b5f86 100755 --- a/artiq/applets/image.py +++ b/artiq/applets/image.py @@ -13,7 +13,7 @@ class Image(pyqtgraph.ImageView): def data_changed(self, data, mods): try: - img = data[self.args.img][1] + img = data[self.args.img]["value"] except KeyError: return self.setImage(img) diff --git a/artiq/applets/plot_hist.py b/artiq/applets/plot_hist.py index dc46997b9..44a4d242e 100755 --- a/artiq/applets/plot_hist.py +++ b/artiq/applets/plot_hist.py @@ -13,11 +13,11 @@ class HistogramPlot(pyqtgraph.PlotWidget): def data_changed(self, data, mods, title): try: - y = data[self.args.y][1] + y = data[self.args.y]["value"] if self.args.x is None: x = None else: - x = data[self.args.x][1] + x = data[self.args.x]["value"] except KeyError: return if x is None: diff --git a/artiq/applets/plot_xy.py b/artiq/applets/plot_xy.py index da7f2197d..ae387cb75 100755 --- a/artiq/applets/plot_xy.py +++ b/artiq/applets/plot_xy.py @@ -5,6 +5,7 @@ import PyQt5 # make sure pyqtgraph imports Qt5 
import pyqtgraph from artiq.applets.simple import TitleApplet +from artiq.master.databases import make_dataset as empty_dataset class XYPlot(pyqtgraph.PlotWidget): @@ -14,14 +15,14 @@ class XYPlot(pyqtgraph.PlotWidget): def data_changed(self, data, mods, title): try: - y = data[self.args.y][1] + y = data[self.args.y]["value"] except KeyError: return - x = data.get(self.args.x, (False, None))[1] + x = data.get(self.args.x, empty_dataset())["value"] if x is None: x = np.arange(len(y)) - error = data.get(self.args.error, (False, None))[1] - fit = data.get(self.args.fit, (False, None))[1] + error = data.get(self.args.error, empty_dataset())["value"] + fit = data.get(self.args.fit, empty_dataset())["value"] if not len(y) or len(y) != len(x): return diff --git a/artiq/applets/plot_xy_hist.py b/artiq/applets/plot_xy_hist.py index 7b8135561..a3a35b4ad 100755 --- a/artiq/applets/plot_xy_hist.py +++ b/artiq/applets/plot_xy_hist.py @@ -112,9 +112,9 @@ class XYHistPlot(QtWidgets.QSplitter): def data_changed(self, data, mods): try: - xs = data[self.args.xs][1] - histogram_bins = data[self.args.histogram_bins][1] - histograms_counts = data[self.args.histograms_counts][1] + xs = data[self.args.xs]["value"] + histogram_bins = data[self.args.histogram_bins]["value"] + histograms_counts = data[self.args.histograms_counts]["value"] except KeyError: return if self._can_use_partial(mods): diff --git a/artiq/applets/simple.py b/artiq/applets/simple.py index e5776310a..db6f2334d 100644 --- a/artiq/applets/simple.py +++ b/artiq/applets/simple.py @@ -10,6 +10,7 @@ from sipyco.sync_struct import Subscriber, process_mod from sipyco import pyon from sipyco.pipe_ipc import AsyncioChildComm +from artiq.master.databases import make_dataset as empty_dataset logger = logging.getLogger(__name__) @@ -251,7 +252,7 @@ class TitleApplet(SimpleApplet): def emit_data_changed(self, data, mod_buffer): if self.args.title is not None: - title_values = {k.replace(".", "/"): data.get(k, (False, None))[1] + 
title_values = {k.replace(".", "/"): data.get(k, empty_dataset())["value"] for k in self.dataset_title} try: title = self.args.title.format(**title_values) diff --git a/artiq/browser/datasets.py b/artiq/browser/datasets.py index b66b18216..d3d8171ac 100644 --- a/artiq/browser/datasets.py +++ b/artiq/browser/datasets.py @@ -104,8 +104,8 @@ class DatasetsDock(QtWidgets.QDockWidget): idx = self.table_model_filter.mapToSource(idx[0]) key = self.table_model.index_to_key(idx) if key is not None: - persist, value = self.table_model.backing_store[key] - asyncio.ensure_future(self._upload_dataset(key, value)) + dataset = self.table_model.backing_store[key] + asyncio.ensure_future(self._upload_dataset(key, dataset["value"])) def save_state(self): return bytes(self.table.header().saveState()) diff --git a/artiq/dashboard/datasets.py b/artiq/dashboard/datasets.py index 5e353d4c9..e0c3a7981 100644 --- a/artiq/dashboard/datasets.py +++ b/artiq/dashboard/datasets.py @@ -83,16 +83,16 @@ class StringEditor(Editor): class Model(DictSyncTreeSepModel): - def __init__(self, init): - DictSyncTreeSepModel.__init__(self, ".", - ["Dataset", "Persistent", "Value"], - init) + def __init__(self, init): + DictSyncTreeSepModel.__init__( + self, ".", ["Dataset", "Persistent", "Value"], init + ) def convert(self, k, v, column): if column == 1: - return "Y" if v[0] else "N" + return "Y" if v["persist"] else "N" elif column == 2: - return short_format(v[1]) + return short_format(v["value"]) else: raise ValueError @@ -152,8 +152,8 @@ class DatasetsDock(QtWidgets.QDockWidget): idx = self.table_model_filter.mapToSource(idx[0]) key = self.table_model.index_to_key(idx) if key is not None: - persist, value = self.table_model.backing_store[key] - t = type(value) + dataset = self.table_model.backing_store[key] + t = type(dataset["value"]) if np.issubdtype(t, np.number): dialog_cls = NumberEditor elif np.issubdtype(t, np.bool_): @@ -164,7 +164,7 @@ class DatasetsDock(QtWidgets.QDockWidget): 
logger.error("Cannot edit dataset %s: " "type %s is not supported", key, t) return - dialog_cls(self, self.dataset_ctl, key, value).open() + dialog_cls(self, self.dataset_ctl, key, dataset["value"]).open() def delete_clicked(self): idx = self.table.selectedIndexes() diff --git a/artiq/language/environment.py b/artiq/language/environment.py index 7992fe3af..fa64c7906 100644 --- a/artiq/language/environment.py +++ b/artiq/language/environment.py @@ -331,7 +331,8 @@ class HasEnvironment: @rpc(flags={"async"}) def set_dataset(self, key, value, - broadcast=False, persist=False, archive=True, save=None): + broadcast=False, persist=False, archive=True, save=None, + **hdf5_options): """Sets the contents and handling modes of a dataset. Datasets must be scalars (``bool``, ``int``, ``float`` or NumPy scalar) @@ -344,12 +345,20 @@ class HasEnvironment: :param archive: the data is saved into the local storage of the current run (archived as a HDF5 file). :param save: deprecated. + :param hdf5_options: additional keyword arguments are passed to + :meth:`h5py.Group.create_dataset`. For example, pass ``compression="gzip"`` + to enable transparent zlib compression of this dataset in the HDF5 archive. + See the `h5py documentation `_ + for a list of valid options. 
""" if save is not None: warnings.warn("set_dataset save parameter is deprecated, " "use archive instead", FutureWarning) archive = save - self.__dataset_mgr.set(key, value, broadcast, persist, archive) + + self.__dataset_mgr.set( + key, value, broadcast, persist, archive, hdf5_options + ) @rpc(flags={"async"}) def mutate_dataset(self, key, index, value): diff --git a/artiq/master/databases.py b/artiq/master/databases.py index 14cfae4cd..fcf1ad31c 100644 --- a/artiq/master/databases.py +++ b/artiq/master/databases.py @@ -35,6 +35,15 @@ class DeviceDB: return desc +def make_dataset(*, persist=False, value=None, hdf5_options=None): + "PYON-serializable representation of a dataset in the DatasetDB" + return { + "persist": persist, + "value": value, + "hdf5_options": hdf5_options or {}, + } + + class DatasetDB(TaskObject): def __init__(self, persist_file, autosave_period=30): self.persist_file = persist_file @@ -44,10 +53,23 @@ class DatasetDB(TaskObject): file_data = pyon.load_file(self.persist_file) except FileNotFoundError: file_data = dict() - self.data = Notifier({k: (True, v) for k, v in file_data.items()}) + self.data = Notifier( + { + k: make_dataset( + persist=True, + value=v["value"], + hdf5_options=v["hdf5_options"] + ) + for k, v in file_data.items() + } + ) def save(self): - data = {k: v[1] for k, v in self.data.raw_view.items() if v[0]} + data = { + k: d + for k, d in self.data.raw_view.items() + if d["persist"] + } pyon.store_file(self.persist_file, data) async def _do(self): @@ -59,20 +81,23 @@ class DatasetDB(TaskObject): self.save() def get(self, key): - return self.data.raw_view[key][1] + return self.data.raw_view[key] def update(self, mod): process_mod(self.data, mod) # convenience functions (update() can be used instead) - def set(self, key, value, persist=None): + def set(self, key, value, persist=None, **hdf5_options): if persist is None: if key in self.data.raw_view: - persist = self.data.raw_view[key][0] + persist = 
self.data.raw_view[key].persist else: persist = False - self.data[key] = (persist, value) + self.data[key] = make_dataset( + persist=persist, + value=value, + hdf5_options=hdf5_options, + ) def delete(self, key): del self.data[key] - # diff --git a/artiq/master/worker_db.py b/artiq/master/worker_db.py index 172846145..8a2200e05 100644 --- a/artiq/master/worker_db.py +++ b/artiq/master/worker_db.py @@ -8,9 +8,12 @@ from operator import setitem import importlib import logging +import numpy as np + from sipyco.sync_struct import Notifier from sipyco.pc_rpc import AutoTarget, Client, BestEffortClient +from artiq.master.databases import make_dataset logger = logging.getLogger(__name__) @@ -115,7 +118,8 @@ class DatasetManager: self.ddb = ddb self._broadcaster.publish = ddb.update - def set(self, key, value, broadcast=False, persist=False, archive=True): + def set(self, key, value, broadcast=False, persist=False, archive=True, + hdf5_options=None): if key in self.archive: logger.warning("Modifying dataset '%s' which is in archive, " "archive will remain untouched", @@ -125,12 +129,20 @@ class DatasetManager: broadcast = True if broadcast: - self._broadcaster[key] = persist, value + self._broadcaster[key] = make_dataset( + persist=persist, + value=value, + hdf5_options=hdf5_options, + ) elif key in self._broadcaster.raw_view: del self._broadcaster[key] if archive: - self.local[key] = value + self.local[key] = make_dataset( + persist=persist, + value=value, + hdf5_options=hdf5_options, + ) elif key in self.local: del self.local[key] @@ -138,11 +150,11 @@ class DatasetManager: target = self.local.get(key, None) if key in self._broadcaster.raw_view: if target is not None: - assert target is self._broadcaster.raw_view[key][1] - return self._broadcaster[key][1] + assert target["value"] is self._broadcaster.raw_view[key]["value"] + return self._broadcaster[key]["value"] if target is None: raise KeyError("Cannot mutate nonexistent dataset '{}'".format(key)) - return target + 
return target["value"] def mutate(self, key, index, value): target = self._get_mutation_target(key) @@ -158,15 +170,15 @@ class DatasetManager: def get(self, key, archive=False): if key in self.local: - return self.local[key] - - data = self.ddb.get(key) + return self.local[key]["value"] + + dataset = self.ddb.get(key) if archive: if key in self.archive: logger.warning("Dataset '%s' is already in archive, " "overwriting", key, stack_info=True) - self.archive[key] = data - return data + self.archive[key] = dataset + return dataset["value"] def write_hdf5(self, f): datasets_group = f.create_group("datasets") @@ -182,7 +194,7 @@ def _write(group, k, v): # Add context to exception message when the user writes a dataset that is # not representable in HDF5. try: - group[k] = v + group.create_dataset(k, data=v["value"], **v["hdf5_options"]) except TypeError as e: raise TypeError("Error writing dataset '{}' of type '{}': {}".format( - k, type(v), e)) + k, type(v["value"]), e)) diff --git a/artiq/test/test_dataset_db.py b/artiq/test/test_dataset_db.py new file mode 100644 index 000000000..74aff8219 --- /dev/null +++ b/artiq/test/test_dataset_db.py @@ -0,0 +1,99 @@ +"""Test internal dataset representation (persistence, applets)""" +import unittest +import tempfile + +from artiq.master.databases import DatasetDB +from sipyco import pyon + +KEY1 = "key1" +KEY2 = "key2" +KEY3 = "key3" +DATA = list(range(10)) +COMP = "gzip" + + +class TestDatasetDB(unittest.TestCase): + def setUp(self): + # empty dataset persistance file + self.persist_file = tempfile.NamedTemporaryFile(mode="w+") + print("{}", file=self.persist_file, flush=True) + + self.ddb = DatasetDB(self.persist_file.name) + + self.ddb.set(KEY1, DATA, persist=True) + self.ddb.set(KEY2, DATA, persist=True, compression=COMP) + self.ddb.set(KEY3, DATA, shuffle=True) + + self.save_ddb_to_disk() + + def save_ddb_to_disk(self): + self.ddb.save() + self.persist_file.flush() + + def load_ddb_from_disk(self): + return 
pyon.load_file(self.persist_file.name) + + def test_persist_format(self): + data = pyon.load_file(self.persist_file.name) + + for key in [KEY1, KEY2]: + self.assertTrue(data[key]["persist"]) + self.assertEqual(data[key]["value"], DATA) + + self.assertEqual(data[KEY2]["hdf5_options"]["compression"], COMP) + self.assertEqual(data[KEY1]["hdf5_options"], dict()) + + def test_only_persist_marked_datasets(self): + data = self.load_ddb_from_disk() + + with self.assertRaises(KeyError): + data[KEY3] + + def test_memory_format(self): + ds = self.ddb.get(KEY2) + self.assertTrue(ds["persist"]) + self.assertEqual(ds["value"], DATA) + self.assertEqual(ds["hdf5_options"]["compression"], COMP) + + ds = self.ddb.get(KEY3) + self.assertFalse(ds["persist"]) + self.assertEqual(ds["value"], DATA) + self.assertTrue(ds["hdf5_options"]["shuffle"]) + + def test_delete(self): + self.ddb.delete(KEY1) + self.save_ddb_to_disk() + + data = self.load_ddb_from_disk() + + with self.assertRaises(KeyError): + data[KEY1] + + self.assertTrue(data[KEY2]["persist"]) + + def test_update(self): + self.assertFalse(self.ddb.get(KEY3)["persist"]) + + mod = { + "action": "setitem", + "path": [KEY3], + "key": "persist", + "value": True, + } + + self.ddb.update(mod) + self.assertTrue(self.ddb.get(KEY3)["persist"]) + + def test_update_hdf5_options(self): + with self.assertRaises(KeyError): + self.ddb.get(KEY1)["hdf5_options"]["shuffle"] + + mod = { + "action": "setitem", + "path": [KEY1, "hdf5_options"], + "key": "shuffle", + "value": False, + } + + self.ddb.update(mod) + self.assertFalse(self.ddb.get(KEY1)["hdf5_options"]["shuffle"]) diff --git a/artiq/test/test_datasets.py b/artiq/test/test_datasets.py index 871568a2a..0d86a4b7c 100644 --- a/artiq/test/test_datasets.py +++ b/artiq/test/test_datasets.py @@ -3,6 +3,9 @@ import copy import unittest +import h5py +import numpy as np + from sipyco.sync_struct import process_mod from artiq.experiment import EnvExperiment @@ -14,7 +17,7 @@ class MockDatasetDB: 
self.data = dict() def get(self, key): - return self.data[key][1] + return self.data[key]["value"] def update(self, mod): # Copy mod before applying to avoid sharing references to objects @@ -82,9 +85,9 @@ class ExperimentDatasetCase(unittest.TestCase): def test_append_broadcast(self): self.exp.set(KEY, [], broadcast=True) self.exp.append(KEY, 0) - self.assertEqual(self.dataset_db.data[KEY][1], [0]) + self.assertEqual(self.dataset_db.data[KEY]["value"], [0]) self.exp.append(KEY, 1) - self.assertEqual(self.dataset_db.data[KEY][1], [0, 1]) + self.assertEqual(self.dataset_db.data[KEY]["value"], [0, 1]) def test_append_array(self): for broadcast in (True, False): @@ -103,3 +106,37 @@ class ExperimentDatasetCase(unittest.TestCase): with self.assertRaises(KeyError): self.exp.append(KEY, 0) + def test_write_hdf5_options(self): + data = np.random.randint(0, 1024, 1024) + self.exp.set(KEY, data, + compression="gzip", compression_opts=6, + shuffle=True, fletcher32=True) + + with h5py.File("test.h5", "a", "core", backing_store=False) as f: + self.dataset_mgr.write_hdf5(f) + + self.assertTrue(np.array_equal(f["datasets"][KEY][()], data)) + self.assertEqual(f["datasets"][KEY].compression, "gzip") + self.assertEqual(f["datasets"][KEY].compression_opts, 6) + self.assertTrue(f["datasets"][KEY].shuffle) + self.assertTrue(f["datasets"][KEY].fletcher32) + + def test_write_hdf5_no_options(self): + data = np.random.randint(0, 1024, 1024) + self.exp.set(KEY, data) + + with h5py.File("test.h5", "a", "core", backing_store=False) as f: + self.dataset_mgr.write_hdf5(f) + self.assertTrue(np.array_equal(f["datasets"][KEY][()], data)) + self.assertIsNone(f["datasets"][KEY].compression) + + def test_write_hdf5_invalid_type(self): + class CustomType: + def __init__(self, x): + self.x = x + + self.exp.set(KEY, CustomType(42)) + + with h5py.File("test.h5", "w", "core", backing_store=False) as f: + with self.assertRaisesRegex(TypeError, "CustomType"): + self.dataset_mgr.write_hdf5(f) diff --git 
a/artiq/test/test_scheduler.py b/artiq/test/test_scheduler.py index ad4f243bd..5a8cdb6bc 100644 --- a/artiq/test/test_scheduler.py +++ b/artiq/test/test_scheduler.py @@ -7,6 +7,7 @@ from time import time, sleep from artiq.experiment import * from artiq.master.scheduler import Scheduler +from artiq.master.databases import make_dataset class EmptyExperiment(EnvExperiment): @@ -291,8 +292,13 @@ class SchedulerCase(unittest.TestCase): nonlocal termination_ok self.assertEqual( mod, - {"action": "setitem", "key": "termination_ok", - "value": (False, True), "path": []}) + { + "action": "setitem", + "key": "termination_ok", + "value": make_dataset(value=True), + "path": [] + } + ) termination_ok = True handlers = { "update_dataset": check_termination From 12ef907f34ddcbd9ff0473aa4b34e78b657c659e Mon Sep 17 00:00:00 2001 From: Etienne Wodey Date: Thu, 17 Jun 2021 16:30:38 +0200 Subject: [PATCH 02/59] master/databases: fix AttributeError in DatasetDB.set() Add corresponding unit test. Signed-off-by: Etienne Wodey --- artiq/master/databases.py | 2 +- artiq/test/test_dataset_db.py | 12 ++++++++++++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/artiq/master/databases.py b/artiq/master/databases.py index fcf1ad31c..310b5caec 100644 --- a/artiq/master/databases.py +++ b/artiq/master/databases.py @@ -90,7 +90,7 @@ class DatasetDB(TaskObject): def set(self, key, value, persist=None, **hdf5_options): if persist is None: if key in self.data.raw_view: - persist = self.data.raw_view[key].persist + persist = self.data.raw_view[key]["persist"] else: persist = False self.data[key] = make_dataset( diff --git a/artiq/test/test_dataset_db.py b/artiq/test/test_dataset_db.py index 74aff8219..3fa4b1f8a 100644 --- a/artiq/test/test_dataset_db.py +++ b/artiq/test/test_dataset_db.py @@ -97,3 +97,15 @@ class TestDatasetDB(unittest.TestCase): self.ddb.update(mod) self.assertFalse(self.ddb.get(KEY1)["hdf5_options"]["shuffle"]) + + def test_reset_copies_persist(self): + 
self.assertTrue(self.ddb.get(KEY1)["persist"]) + self.ddb.set(KEY1, DATA) + self.assertTrue(self.ddb.get(KEY1)["persist"]) + + self.assertFalse(self.ddb.get(KEY3)["persist"]) + self.ddb.set(KEY3, DATA) + self.assertFalse(self.ddb.get(KEY3)["persist"]) + + self.ddb.set(KEY3, DATA, persist=True) + self.assertTrue(self.ddb.get(KEY3)["persist"]) From 8bedf278f0457b570dedc6aa16dd0533c1baefb0 Mon Sep 17 00:00:00 2001 From: Etienne Wodey Date: Thu, 17 Jun 2021 16:43:05 +0200 Subject: [PATCH 03/59] set_dataset: pass HDF5 options as a dict, not as loose kwargs Signed-off-by: Etienne Wodey --- RELEASE_NOTES.rst | 2 +- artiq/language/environment.py | 6 +++--- artiq/master/databases.py | 2 +- artiq/test/test_dataset_db.py | 4 ++-- artiq/test/test_datasets.py | 13 ++++++++++--- 5 files changed, 17 insertions(+), 10 deletions(-) diff --git a/RELEASE_NOTES.rst b/RELEASE_NOTES.rst index bbd2cd113..c0cd659a5 100644 --- a/RELEASE_NOTES.rst +++ b/RELEASE_NOTES.rst @@ -15,7 +15,7 @@ Highlights: - Exposes upconverter calibration and enabling/disabling of upconverter LO & RF outputs. * HDF5 options can now be passed when creating datasets with ``set_dataset``. This allows in particular to use transparent compression filters as follows: - ``set_dataset(name, value, compression="gzip")``. + ``set_dataset(name, value, hdf5_options={"compression": "gzip"})``. Breaking changes: diff --git a/artiq/language/environment.py b/artiq/language/environment.py index fa64c7906..9647325cb 100644 --- a/artiq/language/environment.py +++ b/artiq/language/environment.py @@ -332,7 +332,7 @@ class HasEnvironment: @rpc(flags={"async"}) def set_dataset(self, key, value, broadcast=False, persist=False, archive=True, save=None, - **hdf5_options): + hdf5_options=None): """Sets the contents and handling modes of a dataset. 
Datasets must be scalars (``bool``, ``int``, ``float`` or NumPy scalar) @@ -345,8 +345,8 @@ class HasEnvironment: :param archive: the data is saved into the local storage of the current run (archived as a HDF5 file). :param save: deprecated. - :param hdf5_options: additional keyword arguments are passed to - :meth:`h5py.Group.create_dataset`. For example, pass ``compression="gzip"`` + :param hdf5_options: dict of keyword arguments to pass to + :meth:`h5py.Group.create_dataset`. For example, pass ``{"compression": "gzip"}`` to enable transparent zlib compression of this dataset in the HDF5 archive. See the `h5py documentation `_ for a list of valid options. diff --git a/artiq/master/databases.py b/artiq/master/databases.py index 310b5caec..8ef71c6a2 100644 --- a/artiq/master/databases.py +++ b/artiq/master/databases.py @@ -87,7 +87,7 @@ class DatasetDB(TaskObject): process_mod(self.data, mod) # convenience functions (update() can be used instead) - def set(self, key, value, persist=None, **hdf5_options): + def set(self, key, value, persist=None, hdf5_options=None): if persist is None: if key in self.data.raw_view: persist = self.data.raw_view[key]["persist"] diff --git a/artiq/test/test_dataset_db.py b/artiq/test/test_dataset_db.py index 3fa4b1f8a..3d087a806 100644 --- a/artiq/test/test_dataset_db.py +++ b/artiq/test/test_dataset_db.py @@ -21,8 +21,8 @@ class TestDatasetDB(unittest.TestCase): self.ddb = DatasetDB(self.persist_file.name) self.ddb.set(KEY1, DATA, persist=True) - self.ddb.set(KEY2, DATA, persist=True, compression=COMP) - self.ddb.set(KEY3, DATA, shuffle=True) + self.ddb.set(KEY2, DATA, persist=True, hdf5_options=dict(compression=COMP)) + self.ddb.set(KEY3, DATA, hdf5_options=dict(shuffle=True)) self.save_ddb_to_disk() diff --git a/artiq/test/test_datasets.py b/artiq/test/test_datasets.py index 0d86a4b7c..3fa6d6bb7 100644 --- a/artiq/test/test_datasets.py +++ b/artiq/test/test_datasets.py @@ -108,9 +108,16 @@ class 
ExperimentDatasetCase(unittest.TestCase): def test_write_hdf5_options(self): data = np.random.randint(0, 1024, 1024) - self.exp.set(KEY, data, - compression="gzip", compression_opts=6, - shuffle=True, fletcher32=True) + self.exp.set( + KEY, + data, + hdf5_options=dict( + compression="gzip", + compression_opts=6, + shuffle=True, + fletcher32=True + ), + ) with h5py.File("test.h5", "a", "core", backing_store=False) as f: self.dataset_mgr.write_hdf5(f) From 20e079a381b777550ce7872151b17fb4a11cd7ae Mon Sep 17 00:00:00 2001 From: Peter Drmota <49479443+pmldrmota@users.noreply.github.com> Date: Mon, 15 Nov 2021 05:09:16 +0100 Subject: [PATCH 04/59] AD9910 driver feature extension and SUServo IIR readability (#1500) * coredevice.ad9910: Add set_cfr2 function and extend arguments of set_cfr1 and set_sync * SUServo: Wrap CPLD and DDS devices in a list * SUServo: Refactor [nfc] Co-authored-by: drmota Co-authored-by: David Nadlinger --- RELEASE_NOTES.rst | 3 + artiq/coredevice/ad9910.py | 72 +++++-- artiq/coredevice/suservo.py | 56 ++--- artiq/examples/kasli_suservo/device_db.py | 6 +- artiq/frontend/artiq_ddb_template.py | 15 +- artiq/gateware/suservo/__init__.py | 10 + artiq/gateware/suservo/iir.py | 246 +++++++++++++--------- artiq/gateware/suservo/servo.py | 66 +++++- artiq/gateware/targets/kasli.py | 6 +- 9 files changed, 301 insertions(+), 179 deletions(-) diff --git a/RELEASE_NOTES.rst b/RELEASE_NOTES.rst index c743f2005..467e4e9c4 100644 --- a/RELEASE_NOTES.rst +++ b/RELEASE_NOTES.rst @@ -104,6 +104,9 @@ Breaking changes: * ``quamash`` has been replaced with ``qasync``. * Protocols are updated to use device endian. * Analyzer dump format includes a byte for device endianness. +* To support variable numbers of Urukul cards in the future, the + ``artiq.coredevice.suservo.SUServo`` constructor now accepts two device name lists, + ``cpld_devices`` and ``dds_devices``, rather than four individual arguments. 
* Experiment classes with underscore-prefixed names are now ignored when ``artiq_client`` determines which experiment to submit (consistent with ``artiq_run``). diff --git a/artiq/coredevice/ad9910.py b/artiq/coredevice/ad9910.py index 95ad66896..49bfe9a90 100644 --- a/artiq/coredevice/ad9910.py +++ b/artiq/coredevice/ad9910.py @@ -374,18 +374,25 @@ class AD9910: data[(n - preload) + i] = self.bus.read() @kernel - def set_cfr1(self, power_down: TInt32 = 0b0000, + def set_cfr1(self, + power_down: TInt32 = 0b0000, phase_autoclear: TInt32 = 0, - drg_load_lrr: TInt32 = 0, drg_autoclear: TInt32 = 0, - internal_profile: TInt32 = 0, ram_destination: TInt32 = 0, - ram_enable: TInt32 = 0, manual_osk_external: TInt32 = 0, - osk_enable: TInt32 = 0, select_auto_osk: TInt32 = 0): + drg_load_lrr: TInt32 = 0, + drg_autoclear: TInt32 = 0, + phase_clear: TInt32 = 0, + internal_profile: TInt32 = 0, + ram_destination: TInt32 = 0, + ram_enable: TInt32 = 0, + manual_osk_external: TInt32 = 0, + osk_enable: TInt32 = 0, + select_auto_osk: TInt32 = 0): """Set CFR1. See the AD9910 datasheet for parameter meanings. This method does not pulse IO_UPDATE. :param power_down: Power down bits. :param phase_autoclear: Autoclear phase accumulator. + :param phase_clear: Asynchronous, static reset of the phase accumulator. :param drg_load_lrr: Load digital ramp generator LRR. :param drg_autoclear: Autoclear digital ramp generator. :param internal_profile: Internal profile control. @@ -405,11 +412,41 @@ class AD9910: (drg_load_lrr << 15) | (drg_autoclear << 14) | (phase_autoclear << 13) | + (phase_clear << 11) | (osk_enable << 9) | (select_auto_osk << 8) | (power_down << 4) | 2) # SDIO input only, MSB first + @kernel + def set_cfr2(self, + asf_profile_enable: TInt32 = 1, + drg_enable: TInt32 = 0, + effective_ftw: TInt32 = 1, + sync_validation_disable: TInt32 = 0, + matched_latency_enable: TInt32 = 0): + """Set CFR2. See the AD9910 datasheet for parameter meanings. 
+ + This method does not pulse IO_UPDATE. + + :param asf_profile_enable: Enable amplitude scale from single tone profiles. + :param drg_enable: Digital ramp enable. + :param effective_ftw: Read effective FTW. + :param sync_validation_disable: Disable the SYNC_SMP_ERR pin indicating + (active high) detection of a synchronization pulse sampling error. + :param matched_latency_enable: Simultaneous application of amplitude, + phase, and frequency changes to the DDS arrive at the output + + * matched_latency_enable = 0: in the order listed + * matched_latency_enable = 1: simultaneously. + """ + self.write32(_AD9910_REG_CFR2, + (asf_profile_enable << 24) | + (drg_enable << 19) | + (effective_ftw << 16) | + (matched_latency_enable << 7) | + (sync_validation_disable << 5)) + @kernel def init(self, blind: TBool = False): """Initialize and configure the DDS. @@ -442,7 +479,7 @@ class AD9910: # enable amplitude scale from profiles # read effective FTW # sync timing validation disable (enabled later) - self.write32(_AD9910_REG_CFR2, 0x01010020) + self.set_cfr2(sync_validation_disable=1) self.cpld.io_update.pulse(1 * us) cfr3 = (0x0807c000 | (self.pll_vco << 24) | (self.pll_cp << 19) | (self.pll_en << 8) | @@ -465,7 +502,7 @@ class AD9910: if i >= 100 - 1: raise ValueError("PLL lock timeout") delay(10 * us) # slack - if self.sync_data.sync_delay_seed >= 0: + if self.sync_data.sync_delay_seed >= 0 and not blind: self.tune_sync_delay(self.sync_data.sync_delay_seed) delay(1 * ms) @@ -875,20 +912,26 @@ class AD9910: self.cpld.cfg_sw(self.chip_select - 4, state) @kernel - def set_sync(self, in_delay: TInt32, window: TInt32): + def set_sync(self, + in_delay: TInt32, + window: TInt32, + en_sync_gen: TInt32 = 0): """Set the relevant parameters in the multi device synchronization register. See the AD9910 datasheet for details. The SYNC clock generator preset value is set to zero, and the SYNC_OUT generator is - disabled. + disabled by default. 
:param in_delay: SYNC_IN delay tap (0-31) in steps of ~75ps :param window: Symmetric SYNC_IN validation window (0-15) in steps of ~75ps for both hold and setup margin. + :param en_sync_gen: Whether to enable the DDS-internal sync generator + (SYNC_OUT, cf. sync_sel == 1). Should be left off for the normal + use case, where the SYNC clock is supplied by the core device. """ self.write32(_AD9910_REG_SYNC, (window << 28) | # SYNC S/H validation delay (1 << 27) | # SYNC receiver enable - (0 << 26) | # SYNC generator disable + (en_sync_gen << 26) | # SYNC generator enable (0 << 25) | # SYNC generator SYS rising edge (0 << 18) | # SYNC preset (0 << 11) | # SYNC output delay @@ -904,9 +947,10 @@ class AD9910: Also modifies CFR2. """ - self.write32(_AD9910_REG_CFR2, 0x01010020) # clear SMP_ERR + self.set_cfr2(sync_validation_disable=1) # clear SMP_ERR self.cpld.io_update.pulse(1 * us) - self.write32(_AD9910_REG_CFR2, 0x01010000) # enable SMP_ERR + delay(10 * us) # slack + self.set_cfr2(sync_validation_disable=0) # enable SMP_ERR self.cpld.io_update.pulse(1 * us) @kernel @@ -984,7 +1028,7 @@ class AD9910: # set up DRG self.set_cfr1(drg_load_lrr=1, drg_autoclear=1) # DRG -> FTW, DRG enable - self.write32(_AD9910_REG_CFR2, 0x01090000) + self.set_cfr2(drg_enable=1) # no limits self.write64(_AD9910_REG_RAMP_LIMIT, -1, 0) # DRCTL=0, dt=1 t_SYNC_CLK @@ -1005,7 +1049,7 @@ class AD9910: ftw = self.read32(_AD9910_REG_FTW) # read out effective FTW delay(100 * us) # slack # disable DRG - self.write32(_AD9910_REG_CFR2, 0x01010000) + self.set_cfr2(drg_enable=0) self.cpld.io_update.pulse_mu(8) return ftw & 1 diff --git a/artiq/coredevice/suservo.py b/artiq/coredevice/suservo.py index 932adf35b..1d0a72dad 100644 --- a/artiq/coredevice/suservo.py +++ b/artiq/coredevice/suservo.py @@ -57,32 +57,26 @@ class SUServo: :param channel: RTIO channel number :param pgia_device: Name of the Sampler PGIA gain setting SPI bus - :param cpld0_device: Name of the first Urukul CPLD SPI bus - :param 
cpld1_device: Name of the second Urukul CPLD SPI bus - :param dds0_device: Name of the AD9910 device for the DDS on the first - Urukul - :param dds1_device: Name of the AD9910 device for the DDS on the second - Urukul + :param cpld_devices: Names of the Urukul CPLD SPI buses + :param dds_devices: Names of the AD9910 devices :param gains: Initial value for PGIA gains shift register (default: 0x0000). Knowledge of this state is not transferred between experiments. :param core_device: Core device name """ - kernel_invariants = {"channel", "core", "pgia", "cpld0", "cpld1", - "dds0", "dds1", "ref_period_mu"} + kernel_invariants = {"channel", "core", "pgia", "cplds", "ddses", + "ref_period_mu"} def __init__(self, dmgr, channel, pgia_device, - cpld0_device, cpld1_device, - dds0_device, dds1_device, + cpld_devices, dds_devices, gains=0x0000, core_device="core"): self.core = dmgr.get(core_device) self.pgia = dmgr.get(pgia_device) self.pgia.update_xfer_duration_mu(div=4, length=16) - self.dds0 = dmgr.get(dds0_device) - self.dds1 = dmgr.get(dds1_device) - self.cpld0 = dmgr.get(cpld0_device) - self.cpld1 = dmgr.get(cpld1_device) + assert len(dds_devices) == len(cpld_devices) + self.ddses = [dmgr.get(dds) for dds in dds_devices] + self.cplds = [dmgr.get(cpld) for cpld in cpld_devices] self.channel = channel self.gains = gains self.ref_period_mu = self.core.seconds_to_mu( @@ -109,17 +103,15 @@ class SUServo: sampler.SPI_CONFIG | spi.SPI_END, 16, 4, sampler.SPI_CS_PGIA) - self.cpld0.init(blind=True) - cfg0 = self.cpld0.cfg_reg - self.cpld0.cfg_write(cfg0 | (0xf << urukul.CFG_MASK_NU)) - self.dds0.init(blind=True) - self.cpld0.cfg_write(cfg0) + for i in range(len(self.cplds)): + cpld = self.cplds[i] + dds = self.ddses[i] - self.cpld1.init(blind=True) - cfg1 = self.cpld1.cfg_reg - self.cpld1.cfg_write(cfg1 | (0xf << urukul.CFG_MASK_NU)) - self.dds1.init(blind=True) - self.cpld1.cfg_write(cfg1) + cpld.init(blind=True) + prev_cpld_cfg = cpld.cfg_reg + cpld.cfg_write(prev_cpld_cfg | 
(0xf << urukul.CFG_MASK_NU)) + dds.init(blind=True) + cpld.cfg_write(prev_cpld_cfg) @kernel def write(self, addr, value): @@ -257,9 +249,11 @@ class Channel: self.servo = dmgr.get(servo_device) self.core = self.servo.core self.channel = channel - # FIXME: this assumes the mem channel is right after the control - # channels - self.servo_channel = self.channel + 8 - self.servo.channel + # This assumes the mem channel is right after the control channels + # Make sure this is always the case in eem.py + self.servo_channel = (self.channel + 4 * len(self.servo.cplds) - + self.servo.channel) + self.dds = self.servo.ddses[self.servo_channel // 4] @kernel def set(self, en_out, en_iir=0, profile=0): @@ -311,12 +305,8 @@ class Channel: see :meth:`dds_offset_to_mu` :param phase: DDS phase in turns """ - if self.servo_channel < 4: - dds = self.servo.dds0 - else: - dds = self.servo.dds1 - ftw = dds.frequency_to_ftw(frequency) - pow_ = dds.turns_to_pow(phase) + ftw = self.dds.frequency_to_ftw(frequency) + pow_ = self.dds.turns_to_pow(phase) offs = self.dds_offset_to_mu(offset) self.set_dds_mu(profile, ftw, offs, pow_) diff --git a/artiq/examples/kasli_suservo/device_db.py b/artiq/examples/kasli_suservo/device_db.py index d33bfb280..fdb85dc47 100644 --- a/artiq/examples/kasli_suservo/device_db.py +++ b/artiq/examples/kasli_suservo/device_db.py @@ -191,10 +191,8 @@ device_db = { "arguments": { "channel": 24, "pgia_device": "spi_sampler0_pgia", - "cpld0_device": "urukul0_cpld", - "cpld1_device": "urukul1_cpld", - "dds0_device": "urukul0_dds", - "dds1_device": "urukul1_dds" + "cpld_devices": ["urukul0_cpld", "urukul1_cpld"], + "dds_devices": ["urukul0_dds", "urukul1_dds"], } }, diff --git a/artiq/frontend/artiq_ddb_template.py b/artiq/frontend/artiq_ddb_template.py index 52408a0d4..0a14a06be 100755 --- a/artiq/frontend/artiq_ddb_template.py +++ b/artiq/frontend/artiq_ddb_template.py @@ -364,8 +364,7 @@ class PeripheralManager: def process_suservo(self, rtio_offset, peripheral): 
suservo_name = self.get_name("suservo") sampler_name = self.get_name("sampler") - urukul0_name = self.get_name("urukul") - urukul1_name = self.get_name("urukul") + urukul_names = [self.get_name("urukul") for _ in range(2)] channel = count(0) for i in range(8): self.gen(""" @@ -386,16 +385,14 @@ class PeripheralManager: "arguments": {{ "channel": 0x{suservo_channel:06x}, "pgia_device": "spi_{sampler_name}_pgia", - "cpld0_device": "{urukul0_name}_cpld", - "cpld1_device": "{urukul1_name}_cpld", - "dds0_device": "{urukul0_name}_dds", - "dds1_device": "{urukul1_name}_dds" + "cpld_devices": {cpld_names_list}, + "dds_devices": {dds_names_list} }} }}""", suservo_name=suservo_name, sampler_name=sampler_name, - urukul0_name=urukul0_name, - urukul1_name=urukul1_name, + cpld_names_list=[urukul_name + "_cpld" for urukul_name in urukul_names], + dds_names_list=[urukul_name + "_dds" for urukul_name in urukul_names], suservo_channel=rtio_offset+next(channel)) self.gen(""" device_db["spi_{sampler_name}_pgia"] = {{ @@ -407,7 +404,7 @@ class PeripheralManager: sampler_name=sampler_name, sampler_channel=rtio_offset+next(channel)) pll_vco = peripheral.get("pll_vco") - for urukul_name in (urukul0_name, urukul1_name): + for urukul_name in urukul_names: self.gen(""" device_db["spi_{urukul_name}"] = {{ "type": "local", diff --git a/artiq/gateware/suservo/__init__.py b/artiq/gateware/suservo/__init__.py index e69de29bb..7a1df77ac 100644 --- a/artiq/gateware/suservo/__init__.py +++ b/artiq/gateware/suservo/__init__.py @@ -0,0 +1,10 @@ +"""Gateware implementation of the Sampler-Urukul (AD9910) DDS amplitude servo. + +General conventions: + + - ``t_...`` signals and constants refer to time spans measured in the gateware + module's default clock (typically a 125 MHz RTIO clock). + - ``start`` signals cause modules to proceed with the next servo iteration iff + they are currently idle (i.e. 
their value is irrelevant while the module is + busy, so they are not necessarily one-clock-period strobes). +""" diff --git a/artiq/gateware/suservo/iir.py b/artiq/gateware/suservo/iir.py index 0ebab3f13..0ec9bfa09 100644 --- a/artiq/gateware/suservo/iir.py +++ b/artiq/gateware/suservo/iir.py @@ -1,9 +1,7 @@ from collections import namedtuple import logging - from migen import * - logger = logging.getLogger(__name__) @@ -222,31 +220,30 @@ class IIR(Module): assert w.word <= w.coeff # same memory assert w.state + w.coeff + 3 <= w.accu - # m_coeff of active profiles should only be accessed during + # m_coeff of active profiles should only be accessed externally during # ~processing self.specials.m_coeff = Memory( width=2*w.coeff, # Cat(pow/ftw/offset, cfg/a/b) depth=4 << w.profile + w.channel) - # m_state[x] should only be read during ~(shifting | - # loading) - # m_state[y] of active profiles should only be read during + # m_state[x] should only be read externally during ~(shifting | loading) + # m_state[y] of active profiles should only be read externally during # ~processing self.specials.m_state = Memory( width=w.state, # y1,x0,x1 depth=(1 << w.profile + w.channel) + (2 << w.channel)) # ctrl should only be updated synchronously self.ctrl = [Record([ - ("profile", w.profile), - ("en_out", 1), - ("en_iir", 1), - ("clip", 1), - ("stb", 1)]) - for i in range(1 << w.channel)] + ("profile", w.profile), + ("en_out", 1), + ("en_iir", 1), + ("clip", 1), + ("stb", 1)]) + for i in range(1 << w.channel)] # only update during ~loading self.adc = [Signal((w.adc, True), reset_less=True) for i in range(1 << w.channel)] # Cat(ftw0, ftw1, pow, asf) - # only read during ~processing + # only read externally during ~processing self.dds = [Signal(4*w.word, reset_less=True) for i in range(1 << w.channel)] # perform one IIR iteration, start with loading, @@ -270,100 +267,116 @@ class IIR(Module): en_iirs = Array([ch.en_iir for ch in self.ctrl]) clips = Array([ch.clip for ch in 
self.ctrl]) - # state counter - state = Signal(w.channel + 2) - # pipeline group activity flags (SR) - stage = Signal(3) + # Main state machine sequencing the steps of each servo iteration. The + # module IDLEs until self.start is asserted, and then runs through LOAD, + # PROCESS and SHIFT in order (see description of corresponding flags + # above). The steps share the same memory ports, and are executed + # strictly sequentially. + # + # LOAD/SHIFT just read/write one address per cycle; the duration needed + # to iterate over all channels is determined by counting cycles. + # + # The PROCESSing step is split across a three-stage pipeline, where each + # stage has up to four clock cycles latency. We feed the first stage + # using the (MSBs of) t_current_step, and, after all channels have been + # covered, proceed once the pipeline has completely drained. self.submodules.fsm = fsm = FSM("IDLE") - state_clr = Signal() - stage_en = Signal() + t_current_step = Signal(w.channel + 2) + t_current_step_clr = Signal() + + # pipeline group activity flags (SR) + # 0: load from memory + # 1: compute + # 2: write to output registers (DDS profiles, clip flags) + stages_active = Signal(3) fsm.act("IDLE", self.done.eq(1), - state_clr.eq(1), + t_current_step_clr.eq(1), If(self.start, NextState("LOAD") ) ) fsm.act("LOAD", self.loading.eq(1), - If(state == (1 << w.channel) - 1, - state_clr.eq(1), - stage_en.eq(1), + If(t_current_step == (1 << w.channel) - 1, + t_current_step_clr.eq(1), + NextValue(stages_active[0], 1), NextState("PROCESS") ) ) fsm.act("PROCESS", self.processing.eq(1), # this is technically wasting three cycles - # (one for setting stage, and phase=2,3 with stage[2]) - If(stage == 0, - state_clr.eq(1), - NextState("SHIFT") + # (one for setting stages_active, and phase=2,3 with stages_active[2]) + If(stages_active == 0, + t_current_step_clr.eq(1), + NextState("SHIFT"), ) ) fsm.act("SHIFT", self.shifting.eq(1), - If(state == (2 << w.channel) - 1, + If(t_current_step == 
(2 << w.channel) - 1, NextState("IDLE") ) ) self.sync += [ - state.eq(state + 1), - If(state_clr, - state.eq(0), - ), - If(stage_en, - stage[0].eq(1) + If(t_current_step_clr, + t_current_step.eq(0) + ).Else( + t_current_step.eq(t_current_step + 1) ) ] - # pipeline group channel pointer + # global pipeline phase (lower two bits of t_current_step) + pipeline_phase = Signal(2, reset_less=True) + # pipeline group channel pointer (SR) # for each pipeline stage, this is the channel currently being # processed channel = [Signal(w.channel, reset_less=True) for i in range(3)] + self.comb += Cat(pipeline_phase, channel[0]).eq(t_current_step) + self.sync += [ + If(pipeline_phase == 3, + Cat(channel[1:]).eq(Cat(channel[:-1])), + stages_active[1:].eq(stages_active[:-1]), + If(channel[0] == (1 << w.channel) - 1, + stages_active[0].eq(0) + ) + ) + ] + # pipeline group profile pointer (SR) # for each pipeline stage, this is the profile currently being # processed profile = [Signal(w.profile, reset_less=True) for i in range(2)] - # pipeline phase (lower two bits of state) - phase = Signal(2, reset_less=True) - - self.comb += Cat(phase, channel[0]).eq(state) self.sync += [ - Case(phase, { - 0: [ - profile[0].eq(profiles[channel[0]]), - profile[1].eq(profile[0]) - ], - 3: [ - Cat(channel[1:]).eq(Cat(channel[:-1])), - stage[1:].eq(stage[:-1]), - If(channel[0] == (1 << w.channel) - 1, - stage[0].eq(0) - ) - ] - }) + If(pipeline_phase == 0, + profile[0].eq(profiles[channel[0]]), + profile[1].eq(profile[0]), + ) ] m_coeff = self.m_coeff.get_port() m_state = self.m_state.get_port(write_capable=True) # mode=READ_FIRST self.specials += m_state, m_coeff + # + # Hook up main IIR filter. 
+ # + dsp = DSP(w) self.submodules += dsp offset_clr = Signal() - self.comb += [ - m_coeff.adr.eq(Cat(phase, profile[0], - Mux(phase==0, channel[1], channel[0]))), + m_coeff.adr.eq(Cat(pipeline_phase, profile[0], + Mux(pipeline_phase == 0, channel[1], channel[0]))), dsp.offset[-w.coeff - 1:].eq(Mux(offset_clr, 0, Cat(m_coeff.dat_r[:w.coeff], m_coeff.dat_r[w.coeff - 1]) )), dsp.coeff.eq(m_coeff.dat_r[w.coeff:]), dsp.state.eq(m_state.dat_r), - Case(phase, { + Case(pipeline_phase, { 0: dsp.accu_clr.eq(1), 2: [ offset_clr.eq(1), @@ -373,6 +386,11 @@ class IIR(Module): }) ] + + # + # Arbitrate state memory access between steps. + # + # selected adc and profile delay (combinatorial from dat_r) # both share the same coeff word (sel in the lower 8 bits) sel_profile = Signal(w.channel) @@ -389,13 +407,13 @@ class IIR(Module): sel_profile.eq(m_coeff.dat_r[w.coeff:]), dly_profile.eq(m_coeff.dat_r[w.coeff + 8:]), If(self.shifting, - m_state.adr.eq(state | (1 << w.profile + w.channel)), + m_state.adr.eq(t_current_step | (1 << w.profile + w.channel)), m_state.dat_w.eq(m_state.dat_r), - m_state.we.eq(state[0]) + m_state.we.eq(t_current_step[0]) ), If(self.loading, - m_state.adr.eq((state << 1) | (1 << w.profile + w.channel)), - m_state.dat_w[-w.adc - 1:-1].eq(Array(self.adc)[state]), + m_state.adr.eq((t_current_step << 1) | (1 << w.profile + w.channel)), + m_state.dat_w[-w.adc - 1:-1].eq(Array(self.adc)[t_current_step]), m_state.dat_w[-1].eq(m_state.dat_w[-2]), m_state.we.eq(1) ), @@ -405,16 +423,20 @@ class IIR(Module): Cat(profile[1], channel[2]), # read old y Cat(profile[0], channel[0]), - # x0 (recent) + # read x0 (recent) 0 | (sel_profile << 1) | (1 << w.profile + w.channel), - # x1 (old) + # read x1 (old) 1 | (sel << 1) | (1 << w.profile + w.channel), - ])[phase]), + ])[pipeline_phase]), m_state.dat_w.eq(dsp.output), - m_state.we.eq((phase == 0) & stage[2] & en[1]), + m_state.we.eq((pipeline_phase == 0) & stages_active[2] & en[1]), ) ] + # + # Compute auxiliary signals 
(delayed servo enable, clip indicators, etc.). + # + # internal channel delay counters dlys = Array([Signal(w.dly) for i in range(1 << w.channel)]) @@ -434,51 +456,65 @@ class IIR(Module): en_out = Signal(reset_less=True) # latched channel en_iir en_iir = Signal(reset_less=True) + + self.sync += [ + Case(pipeline_phase, { + 0: [ + dly.eq(dlys[channel[0]]), + en_out.eq(en_outs[channel[0]]), + en_iir.eq(en_iirs[channel[0]]), + If(stages_active[2] & en[1] & dsp.clip, + clips[channel[2]].eq(1) + ) + ], + 2: [ + en[0].eq(0), + en[1].eq(en[0]), + sel.eq(sel_profile), + If(stages_active[0] & en_out, + If(dly != dly_profile, + dlys[channel[0]].eq(dly + 1) + ).Elif(en_iir, + en[0].eq(1) + ) + ) + ], + }), + ] + + # + # Update DDS profile with FTW/POW/ASF + # Stage 0 loads the POW, stage 1 the FTW, and stage 2 writes + # the ASF computed by the IIR filter. + # + # muxing ddss = Array(self.dds) self.sync += [ - Case(phase, { - 0: [ - dly.eq(dlys[channel[0]]), - en_out.eq(en_outs[channel[0]]), - en_iir.eq(en_iirs[channel[0]]), - If(stage[1], - ddss[channel[1]][:w.word].eq(m_coeff.dat_r) - ), - If(stage[2] & en[1] & dsp.clip, - clips[channel[2]].eq(1) - ) - ], - 1: [ - If(stage[1], - ddss[channel[1]][w.word:2*w.word].eq( - m_coeff.dat_r), - ), - If(stage[2], - ddss[channel[2]][3*w.word:].eq( - m_state.dat_r[w.state - w.asf - 1:w.state - 1]) - ) - ], - 2: [ - en[0].eq(0), - en[1].eq(en[0]), - sel.eq(sel_profile), - If(stage[0], - ddss[channel[0]][2*w.word:3*w.word].eq( - m_coeff.dat_r), - If(en_out, - If(dly != dly_profile, - dlys[channel[0]].eq(dly + 1) - ).Elif(en_iir, - en[0].eq(1) - ) - ) - ) - ], - 3: [ - ], - }), + Case(pipeline_phase, { + 0: [ + If(stages_active[1], + ddss[channel[1]][:w.word].eq(m_coeff.dat_r), # ftw0 + ), + ], + 1: [ + If(stages_active[1], + ddss[channel[1]][w.word:2 * w.word].eq(m_coeff.dat_r), # ftw1 + ), + If(stages_active[2], + ddss[channel[2]][3*w.word:].eq( # asf + m_state.dat_r[w.state - w.asf - 1:w.state - 1]) + ) + ], + 2: [ + 
If(stages_active[0], + ddss[channel[0]][2*w.word:3*w.word].eq(m_coeff.dat_r), # pow + ), + ], + 3: [ + ], + }), ] def _coeff(self, channel, profile, coeff): diff --git a/artiq/gateware/suservo/servo.py b/artiq/gateware/suservo/servo.py index 08b31a3bc..1aec95f02 100644 --- a/artiq/gateware/suservo/servo.py +++ b/artiq/gateware/suservo/servo.py @@ -5,32 +5,76 @@ from .iir import IIR, IIRWidths from .dds_ser import DDS, DDSParams +def predict_timing(adc_p, iir_p, dds_p): + """ + The following is a sketch of the timing for 1 Sampler (8 ADCs) and N Urukuls + Shown here, the cycle duration is limited by the IIR loading+processing time. + + ADC|CONVH|CONV|READ|RTT|IDLE|CONVH|CONV|READ|RTT|IDLE|CONVH|CONV|READ|RTT|... + |4 |57 |16 |8 | .. |4 |57 |16 |8 | .. |4 |57 |16 |8 |... + ---+-------------------+------------------------+------------------------+--- + IIR| |LOAD|PROC |SHIFT|LOAD|PROC |SHIFT|... + | |8 |16*N+9 |16 |8 |16*N+9 |16 |... + ---+--------------------------------------+------------------------+--------- + DDS| |CMD|PROF|WAIT|IO_UP|IDLE|CMD|PR... + | |16 |128 |1 |1 | .. |16 | ... + + IIR loading starts once the ADC presents its data, the DDSes are updated + once the IIR processing is over. These are the only blocking processes. + IIR shifting happens in parallel to writing to the DDSes and ADC conversions + take place while the IIR filter is processing or the DDSes are being + written to, depending on the cycle duration (given by whichever module + takes the longest). 
+ """ + t_adc = (adc_p.t_cnvh + adc_p.t_conv + adc_p.t_rtt + + adc_p.channels*adc_p.width//adc_p.lanes) + 1 + # load adc_p.channels values, process dds_p.channels + # (4 processing phases and 2 additional stages à 4 phases + # to complete the processing of the last channel) + t_iir = adc_p.channels + 4*dds_p.channels + 8 + 1 + t_dds = (dds_p.width*2 + 1)*dds_p.clk + 1 + t_cycle = max(t_adc, t_iir, t_dds) + return t_adc, t_iir, t_dds, t_cycle + class Servo(Module): def __init__(self, adc_pads, dds_pads, adc_p, iir_p, dds_p): + t_adc, t_iir, t_dds, t_cycle = predict_timing(adc_p, iir_p, dds_p) + assert t_iir + 2*adc_p.channels < t_cycle, "need shifting time" + self.submodules.adc = ADC(adc_pads, adc_p) self.submodules.iir = IIR(iir_p) self.submodules.dds = DDS(dds_pads, dds_p) # adc channels are reversed on Sampler - for i, j, k, l in zip(reversed(self.adc.data), self.iir.adc, - self.iir.dds, self.dds.profile): - self.comb += j.eq(i), l.eq(k) - - t_adc = (adc_p.t_cnvh + adc_p.t_conv + adc_p.t_rtt + - adc_p.channels*adc_p.width//adc_p.lanes) + 1 - t_iir = ((1 + 4 + 1) << iir_p.channel) + 1 - t_dds = (dds_p.width*2 + 1)*dds_p.clk + 1 - - t_cycle = max(t_adc, t_iir, t_dds) - assert t_iir + (2 << iir_p.channel) < t_cycle, "need shifting time" + for iir, adc in zip(self.iir.adc, reversed(self.adc.data)): + self.comb += iir.eq(adc) + for dds, iir in zip(self.dds.profile, self.iir.dds): + self.comb += dds.eq(iir) + # If high, a new cycle is started if the current cycle (if any) is + # finished. Consequently, if low, servo iterations cease after the + # current cycle is finished. Don't care while the first step (ADC) + # is active. self.start = Signal() + + # Counter for delay between end of ADC cycle and start of next one, + # depending on the duration of the other steps. 
t_restart = t_cycle - t_adc + 1 assert t_restart > 1 cnt = Signal(max=t_restart) cnt_done = Signal() active = Signal(3) + + # Indicates whether different steps (0: ADC, 1: IIR, 2: DDS) are + # currently active (exposed for simulation only), with each bit being + # reset once the successor step is launched. Depending on the + # timing details of the different steps, any number can be concurrently + # active (e.g. ADC read from iteration n, IIR computation from iteration + # n - 1, and DDS write from iteration n - 2). + + # Asserted once per cycle when the DDS write has been completed. self.done = Signal() + self.sync += [ If(self.dds.done, active[2].eq(0) diff --git a/artiq/gateware/targets/kasli.py b/artiq/gateware/targets/kasli.py index 311028fcb..cf8b5760f 100755 --- a/artiq/gateware/targets/kasli.py +++ b/artiq/gateware/targets/kasli.py @@ -228,9 +228,9 @@ class SUServo(StandaloneBase): ttl_serdes_7series.Output_8X, ttl_serdes_7series.Output_8X) # EEM3/2: Sampler, EEM5/4: Urukul, EEM7/6: Urukul - eem.SUServo.add_std( - self, eems_sampler=(3, 2), - eems_urukul0=(5, 4), eems_urukul1=(7, 6)) + eem.SUServo.add_std(self, + eems_sampler=(3, 2), + eems_urukul=[[5, 4], [7, 6]]) for i in (1, 2): sfp_ctl = self.platform.request("sfp_ctl", i) From b49f813b17de89933d075bde433939d61480b05f Mon Sep 17 00:00:00 2001 From: Harry Ho Date: Thu, 18 Nov 2021 16:42:51 +0800 Subject: [PATCH 05/59] artiq_flash: ignore checking non-RTM artifacts if unused --- artiq/frontend/artiq_flash.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/artiq/frontend/artiq_flash.py b/artiq/frontend/artiq_flash.py index 1641e31fd..62b8dc39c 100755 --- a/artiq/frontend/artiq_flash.py +++ b/artiq/frontend/artiq_flash.py @@ -362,7 +362,10 @@ def main(): variants.remove("rtm") except ValueError: pass - if len(variants) == 0: + if all(action in ["rtm_gateware", "storage", "rtm_load", "erase", "start"] + for action in args.action) and args.action: + pass + elif len(variants) == 0: raise 
FileNotFoundError("no variants found, did you install a board binary package?") elif len(variants) == 1: variant = variants[0] From 7307b30213d6b9b7ff3979024b1f7de77137f9f9 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Tue, 23 Nov 2021 12:15:17 +0800 Subject: [PATCH 06/59] flake: update to nixpkgs 21.11 --- flake.lock | 14 +++++++------- flake.nix | 2 +- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/flake.lock b/flake.lock index b01ba2a41..4f321cd7b 100644 --- a/flake.lock +++ b/flake.lock @@ -3,11 +3,11 @@ "mozilla-overlay": { "flake": false, "locked": { - "lastModified": 1636569584, - "narHash": "sha256-iDFogua24bhFJZSxG/jhZbbNxDXuKP9S/pyRIYzrRPM=", + "lastModified": 1637337116, + "narHash": "sha256-LKqAcdL+woWeYajs02bDQ7q8rsqgXuzhC354NoRaV80=", "owner": "mozilla", "repo": "nixpkgs-mozilla", - "rev": "9f70f86d73fa97e043bebeb58e5676d157069cfb", + "rev": "cbc7435f5b0b3d17b16fb1d20cf7b616eec5e093", "type": "github" }, "original": { @@ -18,16 +18,16 @@ }, "nixpkgs": { "locked": { - "lastModified": 1636552551, - "narHash": "sha256-k7Hq/bvUnRlAfFjPGuw3FsSqqspQdRHsCHpgadw6UkQ=", + "lastModified": 1637636156, + "narHash": "sha256-E2ym4Vcpqu9JYoQDXJZR48gVD+LPPbaCoYveIk7Xu3Y=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "9e86f5f7a19db6da2445f07bafa6694b556f9c6d", + "rev": "b026e1cf87a108dd06fe521f224fdc72fd0b013d", "type": "github" }, "original": { "owner": "NixOS", - "ref": "nixos-21.05", + "ref": "release-21.11", "repo": "nixpkgs", "type": "github" } diff --git a/flake.nix b/flake.nix index 4630d3ec8..867166534 100644 --- a/flake.nix +++ b/flake.nix @@ -1,7 +1,7 @@ { description = "A leading-edge control system for quantum information experiments"; - inputs.nixpkgs.url = github:NixOS/nixpkgs/nixos-21.05; + inputs.nixpkgs.url = github:NixOS/nixpkgs/release-21.11; inputs.mozilla-overlay = { url = github:mozilla/nixpkgs-mozilla; flake = false; }; inputs.src-sipyco = { url = github:m-labs/sipyco; flake = false; }; inputs.src-pythonparser = { 
url = github:m-labs/pythonparser; flake = false; }; From 9423428bb006fbd104c52a494eb31df79cd3e8d6 Mon Sep 17 00:00:00 2001 From: occheung Date: Mon, 22 Nov 2021 16:53:57 +0800 Subject: [PATCH 07/59] drtio: fix crc32 offset address --- artiq/firmware/libboard_artiq/drtioaux.rs | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/artiq/firmware/libboard_artiq/drtioaux.rs b/artiq/firmware/libboard_artiq/drtioaux.rs index f72072702..818775d7b 100644 --- a/artiq/firmware/libboard_artiq/drtioaux.rs +++ b/artiq/firmware/libboard_artiq/drtioaux.rs @@ -137,11 +137,10 @@ pub fn send(linkno: u8, packet: &Packet) -> Result<(), Error> { packet.write_to(&mut writer)?; - let padding = 4 - (writer.position() % 4); - if padding != 4 { - for _ in 0..padding { - writer.write_u8(0)?; - } + // Pad till offset 4, insert checksum there + let padding = (12 - (writer.position() % 8)) % 8; + for _ in 0..padding { + writer.write_u8(0)?; } let checksum = crc::crc32::checksum_ieee(&writer.get_ref()[0..writer.position()]); From 5ed9e49b94630bdafebb261907d66dff0533ec4c Mon Sep 17 00:00:00 2001 From: occheung Date: Wed, 24 Nov 2021 11:52:59 +0800 Subject: [PATCH 08/59] changelog: update drtio protocol --- RELEASE_NOTES.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/RELEASE_NOTES.rst b/RELEASE_NOTES.rst index 467e4e9c4..54cb3a284 100644 --- a/RELEASE_NOTES.rst +++ b/RELEASE_NOTES.rst @@ -38,6 +38,7 @@ Breaking changes: * Phaser: fixed coarse mixer frequency configuration * Mirny: Added extra delays in ``ADF5356.sync()``. This avoids the need of an extra delay before calling `ADF5356.init()`. +* DRTIO: Changed message alignment from 32-bits to 64-bits. 
ARTIQ-6 From 6a433b2fcefc30dc9882b0421b6bb7d62f335b22 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Wed, 24 Nov 2021 18:57:16 +0800 Subject: [PATCH 09/59] artiq_sinara_tester: test Urukul attenuator digital control --- artiq/frontend/artiq_sinara_tester.py | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/artiq/frontend/artiq_sinara_tester.py b/artiq/frontend/artiq_sinara_tester.py index 22ba53740..2a0392eec 100755 --- a/artiq/frontend/artiq_sinara_tester.py +++ b/artiq/frontend/artiq_sinara_tester.py @@ -229,6 +229,17 @@ class SinaraTester(EnvExperiment): self.core.break_realtime() cpld.init() + @kernel + def test_urukul_att(self, cpld): + self.core.break_realtime() + for i in range(32): + test_word = 1 << i + cpld.set_all_att_mu(test_word) + readback_word = cpld.get_att_mu() + if readback_word != test_word: + print(readback_word, test_word) + raise ValueError + @kernel def calibrate_urukul(self, channel): self.core.break_realtime() @@ -268,11 +279,12 @@ class SinaraTester(EnvExperiment): def test_urukuls(self): print("*** Testing Urukul DDSes.") - print("Initializing CPLDs...") for name, cpld in sorted(self.urukul_cplds.items(), key=lambda x: x[0]): - print(name + "...") + print(name + ": initializing CPLD...") self.init_urukul(cpld) - print("...done") + print(name + ": testing attenuator digital control...") + self.test_urukul_att(cpld) + print(name + ": done") print("Calibrating inter-device synchronization...") for channel_name, channel_dev in self.urukuls: From 9b01db3d112622a5a7ebbcb0b353d89b91807fc2 Mon Sep 17 00:00:00 2001 From: David Nadlinger Date: Sat, 6 Nov 2021 22:34:32 +0000 Subject: [PATCH 10/59] compiler: Emit sret call site argument attributes LLVM 6 seemed not to mind the mismatch, but more recent versions produce miscompilations without this. Needs llvmlite support (GitHub: numba/llvmlite#702). 
--- artiq/compiler/transforms/llvm_ir_generator.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/artiq/compiler/transforms/llvm_ir_generator.py b/artiq/compiler/transforms/llvm_ir_generator.py index 084e5ae09..9e3482c18 100644 --- a/artiq/compiler/transforms/llvm_ir_generator.py +++ b/artiq/compiler/transforms/llvm_ir_generator.py @@ -1355,7 +1355,7 @@ class LLVMIRGenerator: llstackptr = self.llbuilder.call(self.llbuiltin("llvm.stacksave"), []) llresultslot = self.llbuilder.alloca(llfun.type.pointee.args[0].pointee) - llcall = self.llbuilder.call(llfun, [llresultslot] + llargs) + self.llbuilder.call(llfun, [llresultslot] + llargs, arg_attrs={0: "sret"}) llresult = self.llbuilder.load(llresultslot) self.llbuilder.call(self.llbuiltin("llvm.stackrestore"), [llstackptr]) @@ -1388,7 +1388,8 @@ class LLVMIRGenerator: llresultslot = self.llbuilder.alloca(llfun.type.pointee.args[0].pointee) llcall = self.llbuilder.invoke(llfun, [llresultslot] + llargs, - llnormalblock, llunwindblock, name=insn.name) + llnormalblock, llunwindblock, name=insn.name, + arg_attrs={0: "sret"}) self.llbuilder.position_at_start(llnormalblock) llresult = self.llbuilder.load(llresultslot) From 63b5727a0c69069d0096d6146b26c51b404640ce Mon Sep 17 00:00:00 2001 From: David Nadlinger Date: Sat, 6 Nov 2021 22:56:52 +0000 Subject: [PATCH 11/59] compiler: Also emit byval argument attributes at call sites See previous commit. GitHub: Fixes #1599. 
--- .../compiler/transforms/llvm_ir_generator.py | 42 ++++++++++--------- 1 file changed, 23 insertions(+), 19 deletions(-) diff --git a/artiq/compiler/transforms/llvm_ir_generator.py b/artiq/compiler/transforms/llvm_ir_generator.py index 9e3482c18..5f61db822 100644 --- a/artiq/compiler/transforms/llvm_ir_generator.py +++ b/artiq/compiler/transforms/llvm_ir_generator.py @@ -1174,26 +1174,32 @@ class LLVMIRGenerator: else: llfun = self.map(insn.static_target_function) llenv = self.llbuilder.extract_value(llclosure, 0, name="env.fun") - return llfun, [llenv] + list(llargs) + return llfun, [llenv] + list(llargs), {} def _prepare_ffi_call(self, insn): llargs = [] - byvals = [] + llarg_attrs = {} for i, arg in enumerate(insn.arguments()): llarg = self.map(arg) if isinstance(llarg.type, (ll.LiteralStructType, ll.IdentifiedStructType)): llslot = self.llbuilder.alloca(llarg.type) self.llbuilder.store(llarg, llslot) llargs.append(llslot) - byvals.append(i) + llarg_attrs[i] = "byval" else: llargs.append(llarg) + llretty = self.llty_of_type(insn.type, for_return=True) + is_sret = self.needs_sret(llretty) + if is_sret: + llarg_attrs = {i + 1: a for (i, a) in llarg_attrs.items()} + llarg_attrs[0] = "sret" + llfunname = insn.target_function().type.name llfun = self.llmodule.globals.get(llfunname) if llfun is None: - llretty = self.llty_of_type(insn.type, for_return=True) - if self.needs_sret(llretty): + # Function has not been declared in the current LLVM module, do it now. 
+ if is_sret: llfunty = ll.FunctionType(llvoid, [llretty.as_pointer()] + [llarg.type for llarg in llargs]) else: @@ -1201,17 +1207,14 @@ class LLVMIRGenerator: llfun = ll.Function(self.llmodule, llfunty, insn.target_function().type.name) - if self.needs_sret(llretty): - llfun.args[0].add_attribute('sret') - byvals = [i + 1 for i in byvals] - for i in byvals: - llfun.args[i].add_attribute('byval') + for idx, attr in llarg_attrs.items(): + llfun.args[idx].add_attribute(attr) if 'nounwind' in insn.target_function().type.flags: llfun.attributes.add('nounwind') if 'nowrite' in insn.target_function().type.flags: llfun.attributes.add('inaccessiblememonly') - return llfun, list(llargs) + return llfun, list(llargs), llarg_attrs def _build_rpc(self, fun_loc, fun_type, args, llnormalblock, llunwindblock): llservice = ll.Constant(lli32, fun_type.service) @@ -1347,20 +1350,21 @@ class LLVMIRGenerator: insn.arguments(), llnormalblock=None, llunwindblock=None) elif types.is_external_function(functiontyp): - llfun, llargs = self._prepare_ffi_call(insn) + llfun, llargs, llarg_attrs = self._prepare_ffi_call(insn) else: - llfun, llargs = self._prepare_closure_call(insn) + llfun, llargs, llarg_attrs = self._prepare_closure_call(insn) if self.has_sret(functiontyp): llstackptr = self.llbuilder.call(self.llbuiltin("llvm.stacksave"), []) llresultslot = self.llbuilder.alloca(llfun.type.pointee.args[0].pointee) - self.llbuilder.call(llfun, [llresultslot] + llargs, arg_attrs={0: "sret"}) + self.llbuilder.call(llfun, [llresultslot] + llargs, arg_attrs=llarg_attrs) llresult = self.llbuilder.load(llresultslot) self.llbuilder.call(self.llbuiltin("llvm.stackrestore"), [llstackptr]) else: - llcall = llresult = self.llbuilder.call(llfun, llargs, name=insn.name) + llresult = self.llbuilder.call(llfun, llargs, name=insn.name, + arg_attrs=llarg_attrs) if isinstance(llresult.type, ll.VoidType): # We have NoneType-returning functions return void, but None is @@ -1379,9 +1383,9 @@ class LLVMIRGenerator: 
insn.arguments(), llnormalblock, llunwindblock) elif types.is_external_function(functiontyp): - llfun, llargs = self._prepare_ffi_call(insn) + llfun, llargs, llarg_attrs = self._prepare_ffi_call(insn) else: - llfun, llargs = self._prepare_closure_call(insn) + llfun, llargs, llarg_attrs = self._prepare_closure_call(insn) if self.has_sret(functiontyp): llstackptr = self.llbuilder.call(self.llbuiltin("llvm.stacksave"), []) @@ -1389,7 +1393,7 @@ class LLVMIRGenerator: llresultslot = self.llbuilder.alloca(llfun.type.pointee.args[0].pointee) llcall = self.llbuilder.invoke(llfun, [llresultslot] + llargs, llnormalblock, llunwindblock, name=insn.name, - arg_attrs={0: "sret"}) + arg_attrs=llarg_attrs) self.llbuilder.position_at_start(llnormalblock) llresult = self.llbuilder.load(llresultslot) @@ -1397,7 +1401,7 @@ class LLVMIRGenerator: self.llbuilder.call(self.llbuiltin("llvm.stackrestore"), [llstackptr]) else: llcall = self.llbuilder.invoke(llfun, llargs, llnormalblock, llunwindblock, - name=insn.name) + name=insn.name, arg_attrs=llarg_attrs) llresult = llcall # The !tbaa metadata is not legal to use with the invoke instruction, From c6039479e47e56743657355184c5f249e5137743 Mon Sep 17 00:00:00 2001 From: David Nadlinger Date: Sat, 27 Nov 2021 04:43:52 +0000 Subject: [PATCH 12/59] compiler: Add lit test for call site attributes [nfc] --- artiq/test/lit/embedding/syscall_arg_attrs.py | 30 +++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 artiq/test/lit/embedding/syscall_arg_attrs.py diff --git a/artiq/test/lit/embedding/syscall_arg_attrs.py b/artiq/test/lit/embedding/syscall_arg_attrs.py new file mode 100644 index 000000000..67207dc32 --- /dev/null +++ b/artiq/test/lit/embedding/syscall_arg_attrs.py @@ -0,0 +1,30 @@ +# RUN: env ARTIQ_DUMP_LLVM=%t %python -m artiq.compiler.testbench.embedding +compile %s +# RUN: OutputCheck %s --file-to-check=%t.ll + +from artiq.language.core import * +from artiq.language.types import * + +# Make sure `byval` and `sret` 
are specified both at the call site and the +# declaration. This isn't caught by the LLVM IR validator, but mismatches +# lead to miscompilations (at least in LLVM 11). + + +@kernel +def entrypoint(): + # CHECK: call void @accept_str\({ i8\*, i32 }\* nonnull byval + accept_str("foo") + + # CHECK: call void @return_str\({ i8\*, i32 }\* nonnull sret + return_str() + + +# CHECK: declare void @accept_str\({ i8\*, i32 }\* byval\) +@syscall +def accept_str(name: TStr) -> TNone: + pass + + +# CHECK: declare void @return_str\({ i8\*, i32 }\* sret\) +@syscall +def return_str() -> TStr: + pass From 5a923a095674a8192186847426a08c6f8743d800 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Wed, 1 Dec 2021 22:39:24 +0800 Subject: [PATCH 13/59] flake: switch to nixos- branch --- flake.lock | 8 ++++---- flake.nix | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/flake.lock b/flake.lock index 4f321cd7b..bf378e2eb 100644 --- a/flake.lock +++ b/flake.lock @@ -18,16 +18,16 @@ }, "nixpkgs": { "locked": { - "lastModified": 1637636156, - "narHash": "sha256-E2ym4Vcpqu9JYoQDXJZR48gVD+LPPbaCoYveIk7Xu3Y=", + "lastModified": 1638279546, + "narHash": "sha256-1KCwN7twjp1dBdp0jPgVdYFztDkCR8+roo0B34J9oBY=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "b026e1cf87a108dd06fe521f224fdc72fd0b013d", + "rev": "96b4157790fc96e70d6e6c115e3f34bba7be490f", "type": "github" }, "original": { "owner": "NixOS", - "ref": "release-21.11", + "ref": "nixos-21.11", "repo": "nixpkgs", "type": "github" } diff --git a/flake.nix b/flake.nix index 867166534..834a8eab9 100644 --- a/flake.nix +++ b/flake.nix @@ -1,7 +1,7 @@ { description = "A leading-edge control system for quantum information experiments"; - inputs.nixpkgs.url = github:NixOS/nixpkgs/release-21.11; + inputs.nixpkgs.url = github:NixOS/nixpkgs/nixos-21.11; inputs.mozilla-overlay = { url = github:mozilla/nixpkgs-mozilla; flake = false; }; inputs.src-sipyco = { url = github:m-labs/sipyco; flake = false; }; 
inputs.src-pythonparser = { url = github:m-labs/pythonparser; flake = false; }; From b8e7add785526ff599db5e892d83d482bf85951c Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Wed, 1 Dec 2021 22:41:34 +0800 Subject: [PATCH 14/59] language: remove deprecated set_dataset(..., save=...) --- RELEASE_NOTES.rst | 1 + artiq/language/environment.py | 8 +------- 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/RELEASE_NOTES.rst b/RELEASE_NOTES.rst index 54cb3a284..048e9876c 100644 --- a/RELEASE_NOTES.rst +++ b/RELEASE_NOTES.rst @@ -39,6 +39,7 @@ Breaking changes: * Mirny: Added extra delays in ``ADF5356.sync()``. This avoids the need of an extra delay before calling `ADF5356.init()`. * DRTIO: Changed message alignment from 32-bits to 64-bits. +* The deprecated ``set_dataset(..., save=...)`` is no longer supported. ARTIQ-6 diff --git a/artiq/language/environment.py b/artiq/language/environment.py index 7992fe3af..a1de09e5b 100644 --- a/artiq/language/environment.py +++ b/artiq/language/environment.py @@ -1,4 +1,3 @@ -import warnings from collections import OrderedDict from inspect import isclass @@ -331,7 +330,7 @@ class HasEnvironment: @rpc(flags={"async"}) def set_dataset(self, key, value, - broadcast=False, persist=False, archive=True, save=None): + broadcast=False, persist=False, archive=True): """Sets the contents and handling modes of a dataset. Datasets must be scalars (``bool``, ``int``, ``float`` or NumPy scalar) @@ -343,12 +342,7 @@ class HasEnvironment: broadcast. :param archive: the data is saved into the local storage of the current run (archived as a HDF5 file). - :param save: deprecated. 
""" - if save is not None: - warnings.warn("set_dataset save parameter is deprecated, " - "use archive instead", FutureWarning) - archive = save self.__dataset_mgr.set(key, value, broadcast, persist, archive) @rpc(flags={"async"}) From 9f830b86c059fe356da2962d8755d258f5b86233 Mon Sep 17 00:00:00 2001 From: Etienne Wodey <44871469+airwoodix@users.noreply.github.com> Date: Fri, 3 Dec 2021 10:05:35 +0100 Subject: [PATCH 15/59] kasli: add SED lanes count option to HW description JSON file (#1745) Signed-off-by: Etienne Wodey --- RELEASE_NOTES.rst | 2 ++ artiq/coredevice/coredevice_generic.schema.json | 7 +++++++ artiq/gateware/targets/kasli.py | 12 ++++++------ artiq/gateware/targets/kasli_generic.py | 7 ++++--- 4 files changed, 19 insertions(+), 9 deletions(-) diff --git a/RELEASE_NOTES.rst b/RELEASE_NOTES.rst index 048e9876c..6dc247b04 100644 --- a/RELEASE_NOTES.rst +++ b/RELEASE_NOTES.rst @@ -20,6 +20,8 @@ Highlights: - Exposes upconverter calibration and enabling/disabling of upconverter LO & RF outputs. - Add helpers to align Phaser updates to the RTIO timeline (``get_next_frame_mu()``) * ``get()``, ``get_mu()``, ``get_att()``, and ``get_att_mu()`` functions added for AD9910 and AD9912 +* On Kasli, the number of FIFO lanes in the scalable events dispatcher (SED) can now be configured in + the JSON hardware description file. 
* New hardware support: - HVAMP_8CH 8 channel HV amplifier for Fastino / Zotino * ``artiq_ddb_template`` generates edge-counter keys that start with the key of the corresponding diff --git a/artiq/coredevice/coredevice_generic.schema.json b/artiq/coredevice/coredevice_generic.schema.json index 64e4a9251..274d7e2aa 100644 --- a/artiq/coredevice/coredevice_generic.schema.json +++ b/artiq/coredevice/coredevice_generic.schema.json @@ -64,6 +64,13 @@ "type": "boolean", "default": false }, + "sed_lanes": { + "type": "number", + "minimum": 1, + "maximum": 32, + "default": 8, + "description": "Number of FIFOs in the SED, must be a power of 2" + }, "peripherals": { "type": "array", "items": { diff --git a/artiq/gateware/targets/kasli.py b/artiq/gateware/targets/kasli.py index cf8b5760f..71ee933a5 100755 --- a/artiq/gateware/targets/kasli.py +++ b/artiq/gateware/targets/kasli.py @@ -135,12 +135,12 @@ class StandaloneBase(MiniSoC, AMPSoC): self.config["HAS_SI5324"] = None self.config["SI5324_SOFT_RESET"] = None - def add_rtio(self, rtio_channels): + def add_rtio(self, rtio_channels, sed_lanes=8): self.submodules.rtio_crg = _RTIOCRG(self.platform) self.csr_devices.append("rtio_crg") fix_serdes_timing_path(self.platform) self.submodules.rtio_tsc = rtio.TSC("async", glbl_fine_ts_width=3) - self.submodules.rtio_core = rtio.Core(self.rtio_tsc, rtio_channels) + self.submodules.rtio_core = rtio.Core(self.rtio_tsc, rtio_channels, lane_count=sed_lanes) self.csr_devices.append("rtio_core") self.submodules.rtio = rtio.KernelInitiator(self.rtio_tsc) self.submodules.rtio_dma = ClockDomainsRenamer("sys_kernel")( @@ -375,13 +375,13 @@ class MasterBase(MiniSoC, AMPSoC): self.csr_devices.append("rtio_crg") fix_serdes_timing_path(platform) - def add_rtio(self, rtio_channels): + def add_rtio(self, rtio_channels, sed_lanes=8): # Only add MonInj core if there is anything to monitor if any([len(c.probes) for c in rtio_channels]): self.submodules.rtio_moninj = rtio.MonInj(rtio_channels) 
self.csr_devices.append("rtio_moninj") - self.submodules.rtio_core = rtio.Core(self.rtio_tsc, rtio_channels) + self.submodules.rtio_core = rtio.Core(self.rtio_tsc, rtio_channels, lane_count=sed_lanes) self.csr_devices.append("rtio_core") self.submodules.rtio = rtio.KernelInitiator(self.rtio_tsc) @@ -608,13 +608,13 @@ class SatelliteBase(BaseSoC): self.csr_devices.append("rtio_crg") fix_serdes_timing_path(platform) - def add_rtio(self, rtio_channels): + def add_rtio(self, rtio_channels, sed_lanes=8): # Only add MonInj core if there is anything to monitor if any([len(c.probes) for c in rtio_channels]): self.submodules.rtio_moninj = rtio.MonInj(rtio_channels) self.csr_devices.append("rtio_moninj") - self.submodules.local_io = SyncRTIO(self.rtio_tsc, rtio_channels) + self.submodules.local_io = SyncRTIO(self.rtio_tsc, rtio_channels, lane_count=sed_lanes) self.comb += self.drtiosat.async_errors.eq(self.local_io.async_errors) self.submodules.cri_con = rtio.CRIInterconnectShared( [self.drtiosat.cri], diff --git a/artiq/gateware/targets/kasli_generic.py b/artiq/gateware/targets/kasli_generic.py index 122c3e0cf..bb822f41e 100755 --- a/artiq/gateware/targets/kasli_generic.py +++ b/artiq/gateware/targets/kasli_generic.py @@ -57,7 +57,8 @@ class GenericStandalone(StandaloneBase): self.config["RTIO_LOG_CHANNEL"] = len(self.rtio_channels) self.rtio_channels.append(rtio.LogChannel()) - self.add_rtio(self.rtio_channels) + self.add_rtio(self.rtio_channels, sed_lanes=description["sed_lanes"]) + if has_grabber: self.config["HAS_GRABBER"] = None self.add_csr_group("grabber", self.grabber_csr_group) @@ -94,7 +95,7 @@ class GenericMaster(MasterBase): self.config["RTIO_LOG_CHANNEL"] = len(self.rtio_channels) self.rtio_channels.append(rtio.LogChannel()) - self.add_rtio(self.rtio_channels) + self.add_rtio(self.rtio_channels, sed_lanes=description["sed_lanes"]) if has_grabber: self.config["HAS_GRABBER"] = None self.add_csr_group("grabber", self.grabber_csr_group) @@ -127,7 +128,7 @@ class 
GenericSatellite(SatelliteBase): self.config["RTIO_LOG_CHANNEL"] = len(self.rtio_channels) self.rtio_channels.append(rtio.LogChannel()) - self.add_rtio(self.rtio_channels) + self.add_rtio(self.rtio_channels, sed_lanes=description["sed_lanes"]) if has_grabber: self.config["HAS_GRABBER"] = None self.add_csr_group("grabber", self.grabber_csr_group) From 163f5d91281c1fe01f47f2c8aad9160242c2acd9 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Fri, 3 Dec 2021 17:16:54 +0800 Subject: [PATCH 16/59] flake: debug hitl auth failures --- flake.nix | 1 + 1 file changed, 1 insertion(+) diff --git a/flake.nix b/flake.nix index 834a8eab9..75ef864af 100644 --- a/flake.nix +++ b/flake.nix @@ -405,6 +405,7 @@ phases = [ "buildPhase" ]; buildPhase = '' + whoami export HOME=`mktemp -d` mkdir $HOME/.ssh cp /opt/hydra_id_rsa $HOME/.ssh/id_rsa From eec3ea6589fe8aafeadab2f07dc826b522b5d4e8 Mon Sep 17 00:00:00 2001 From: mwojcik Date: Fri, 26 Nov 2021 13:17:40 +0800 Subject: [PATCH 17/59] siphaser: add support for 100mhz rtio --- artiq/gateware/drtio/siphaser.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/artiq/gateware/drtio/siphaser.py b/artiq/gateware/drtio/siphaser.py index 81dacaed0..9dbee2d11 100644 --- a/artiq/gateware/drtio/siphaser.py +++ b/artiq/gateware/drtio/siphaser.py @@ -4,7 +4,7 @@ from migen.genlib.cdc import MultiReg, PulseSynchronizer from misoc.interconnect.csr import * -# This code assumes 125/62.5MHz reference clock and 125MHz or 150MHz RTIO +# This code assumes 125/62.5MHz reference clock and 100MHz, 125MHz or 150MHz RTIO # frequency. class SiPhaser7Series(Module, AutoCSR): @@ -15,9 +15,9 @@ class SiPhaser7Series(Module, AutoCSR): self.phase_shift_done = CSRStatus(reset=1) self.error = CSR() - assert rtio_clk_freq in (125e6, 150e6) + assert rtio_clk_freq in (100e6, 125e6, 150e6) - # 125MHz/62.5MHz reference clock to 125MHz/150MHz. VCO @ 750MHz. + # 125MHz/62.5MHz reference clock to 100MHz/125MHz/150MHz. VCO @ 750MHz. 
# Used to provide a startup clock to the transceiver through the Si, # we do not use the crystal reference so that the PFD (f3) frequency # can be high. @@ -43,7 +43,7 @@ class SiPhaser7Series(Module, AutoCSR): else: mmcm_freerun_output = mmcm_freerun_output_raw - # 125MHz/150MHz to 125MHz/150MHz with controllable phase shift, + # 100MHz/125MHz/150MHz to 100MHz/125MHz/150MHz with controllable phase shift, # VCO @ 1000MHz/1200MHz. # Inserted between CDR and output to Si, used to correct # non-determinstic skew of Si5324. From f281112779606f279a3f8dde05a25b71080082fb Mon Sep 17 00:00:00 2001 From: mwojcik Date: Fri, 3 Dec 2021 11:22:15 +0800 Subject: [PATCH 18/59] satman: add 100mhz si5324 settings siphaser: add calculated vco for 100mhz comment --- artiq/firmware/satman/main.rs | 13 +++++++++++++ artiq/gateware/drtio/siphaser.py | 2 +- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/artiq/firmware/satman/main.rs b/artiq/firmware/satman/main.rs index e0ec83612..c6685f895 100644 --- a/artiq/firmware/satman/main.rs +++ b/artiq/firmware/satman/main.rs @@ -447,6 +447,19 @@ const SI5324_SETTINGS: si5324::FrequencySettings crystal_ref: true }; +#[cfg(all(has_si5324, rtio_frequency = "100.0"))] +const SI5324_SETTINGS: si5324::FrequencySettings + = si5324::FrequencySettings { + n1_hs : 5, + nc1_ls : 10, + n2_hs : 10, + n2_ls : 250, + n31 : 50, + n32 : 50, + bwsel : 4, + crystal_ref: true +}; + #[no_mangle] pub extern fn main() -> i32 { extern { diff --git a/artiq/gateware/drtio/siphaser.py b/artiq/gateware/drtio/siphaser.py index 9dbee2d11..5237b7453 100644 --- a/artiq/gateware/drtio/siphaser.py +++ b/artiq/gateware/drtio/siphaser.py @@ -44,7 +44,7 @@ class SiPhaser7Series(Module, AutoCSR): mmcm_freerun_output = mmcm_freerun_output_raw # 100MHz/125MHz/150MHz to 100MHz/125MHz/150MHz with controllable phase shift, - # VCO @ 1000MHz/1200MHz. + # VCO @ 800MHz/1000MHz/1200MHz. 
# Inserted between CDR and output to Si, used to correct # non-determinstic skew of Si5324. mmcm_ps_fb = Signal() From 7953f3d7054404a7c4a7729ef04af65f20eabf92 Mon Sep 17 00:00:00 2001 From: mwojcik Date: Fri, 3 Dec 2021 11:53:48 +0800 Subject: [PATCH 19/59] kc705: add drtio 100mhz clk switch --- artiq/gateware/targets/kc705.py | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/artiq/gateware/targets/kc705.py b/artiq/gateware/targets/kc705.py index 4cec96e87..e04088a92 100755 --- a/artiq/gateware/targets/kc705.py +++ b/artiq/gateware/targets/kc705.py @@ -129,7 +129,7 @@ class _StandaloneBase(MiniSoC, AMPSoC): } mem_map.update(MiniSoC.mem_map) - def __init__(self, gateware_identifier_str=None, **kwargs): + def __init__(self, gateware_identifier_str=None, drtio_100mhz=False, **kwargs): MiniSoC.__init__(self, cpu_type="vexriscv", cpu_bus_width=64, @@ -207,7 +207,7 @@ class _MasterBase(MiniSoC, AMPSoC): } mem_map.update(MiniSoC.mem_map) - def __init__(self, gateware_identifier_str=None, **kwargs): + def __init__(self, gateware_identifier_str=None, drtio_100mhz=False, **kwargs): MiniSoC.__init__(self, cpu_type="vexriscv", cpu_bus_width=64, @@ -236,11 +236,14 @@ class _MasterBase(MiniSoC, AMPSoC): platform.request("sfp"), platform.request("user_sma_mgt") ] - # 1000BASE_BX10 Ethernet compatible, 125MHz RTIO clock + rtio_clk_freq = 100e6 if drtio_100mhz else 125e6 + + # 1000BASE_BX10 Ethernet compatible, 100/125MHz RTIO clock self.submodules.drtio_transceiver = gtx_7series.GTX( clock_pads=platform.request("si5324_clkout"), pads=data_pads, - sys_clk_freq=self.clk_freq) + sys_clk_freq=self.clk_freq, + rtio_clk_freq=rtio_clk_freq) self.csr_devices.append("drtio_transceiver") self.submodules.rtio_tsc = rtio.TSC("async", glbl_fine_ts_width=3) @@ -341,7 +344,7 @@ class _SatelliteBase(BaseSoC): } mem_map.update(BaseSoC.mem_map) - def __init__(self, gateware_identifier_str=None, sma_as_sat=False, **kwargs): + def __init__(self, 
gateware_identifier_str=None, sma_as_sat=False, drtio_100mhz=False, **kwargs): BaseSoC.__init__(self, cpu_type="vexriscv", cpu_bus_width=64, @@ -369,11 +372,14 @@ class _SatelliteBase(BaseSoC): if sma_as_sat: data_pads = data_pads[::-1] - # 1000BASE_BX10 Ethernet compatible, 125MHz RTIO clock + rtio_clk_freq = 100e6 if drtio_100mhz else 125e6 + + # 1000BASE_BX10 Ethernet compatible, 100/125MHz RTIO clock self.submodules.drtio_transceiver = gtx_7series.GTX( clock_pads=platform.request("si5324_clkout"), pads=data_pads, - sys_clk_freq=self.clk_freq) + sys_clk_freq=self.clk_freq, + rtio_clk_freq=rtio_clk_freq) self.csr_devices.append("drtio_transceiver") self.submodules.rtio_tsc = rtio.TSC("sync", glbl_fine_ts_width=3) @@ -673,6 +679,8 @@ def main(): "(default: %(default)s)") parser.add_argument("--gateware-identifier-str", default=None, help="Override ROM identifier") + parser.add_argument("--drtio100mhz", action="store_true", default=False, + help="DRTIO systems only - use 100MHz RTIO clock") args = parser.parse_args() variant = args.variant.lower() @@ -681,7 +689,7 @@ def main(): except KeyError: raise SystemExit("Invalid variant (-V/--variant)") - soc = cls(gateware_identifier_str=args.gateware_identifier_str, **soc_kc705_argdict(args)) + soc = cls(gateware_identifier_str=args.gateware_identifier_str, drtio_100mhz=args.drtio100mhz, **soc_kc705_argdict(args)) build_artiq_soc(soc, builder_argdict(args)) From f8a649deda89460f3d56a43b798e2190b9039597 Mon Sep 17 00:00:00 2001 From: mwojcik Date: Fri, 3 Dec 2021 11:55:23 +0800 Subject: [PATCH 20/59] release notes: mention 100mhz support --- RELEASE_NOTES.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/RELEASE_NOTES.rst b/RELEASE_NOTES.rst index 6dc247b04..a1c807008 100644 --- a/RELEASE_NOTES.rst +++ b/RELEASE_NOTES.rst @@ -30,6 +30,7 @@ Highlights: repository when building the list of experiments. 
* The configuration entry ``rtio_clock`` supports multiple clocking settings, deprecating the usage of compile-time options. +* DRTIO: added support for 100MHz clock. Breaking changes: From 9bbf7eb48539082ee57e3f9b6faef7b0246a042b Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Fri, 3 Dec 2021 18:34:49 +0800 Subject: [PATCH 21/59] flake: use ed25519 key for hitl --- flake.nix | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/flake.nix b/flake.nix index 75ef864af..5fa94518d 100644 --- a/flake.nix +++ b/flake.nix @@ -408,15 +408,15 @@ whoami export HOME=`mktemp -d` mkdir $HOME/.ssh - cp /opt/hydra_id_rsa $HOME/.ssh/id_rsa - cp /opt/hydra_id_rsa.pub $HOME/.ssh/id_rsa.pub + cp /opt/hydra_id_ed25519 $HOME/.ssh/id_ed25519 + cp /opt/hydra_id_ed25519.pub $HOME/.ssh/id_ed25519.pub echo "rpi-1 ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIPOBQVcsvk6WgRj18v4m0zkFeKrcN9gA+r6sxQxNwFpv" > $HOME/.ssh/known_hosts - chmod 600 $HOME/.ssh/id_rsa + chmod 600 $HOME/.ssh/id_ed25519 LOCKCTL=$(mktemp -d) mkfifo $LOCKCTL/lockctl cat $LOCKCTL/lockctl | ${pkgs.openssh}/bin/ssh \ - -i $HOME/.ssh/id_rsa \ + -i $HOME/.ssh/id_ed25519 \ -o UserKnownHostsFile=$HOME/.ssh/known_hosts \ rpi-1 \ 'mkdir -p /tmp/board_lock && flock /tmp/board_lock/kc705-1 -c "echo Ok; cat"' \ From 4a6bea479af03bca5a3583977bbdebf4c7f6b14e Mon Sep 17 00:00:00 2001 From: Steve Fan <19037626d@connect.polyu.hk> Date: Sat, 4 Dec 2021 13:33:24 +0800 Subject: [PATCH 22/59] Host report for async error upon kernel termination (#1791) Closes #1644 --- RELEASE_NOTES.rst | 4 +++- artiq/coredevice/comm_kernel.py | 12 ++++++++++++ artiq/firmware/libproto_artiq/session_proto.rs | 14 ++++++++++---- artiq/firmware/runtime/rtio_mgt.rs | 9 +++++++++ artiq/firmware/runtime/session.rs | 8 ++++++-- 5 files changed, 40 insertions(+), 7 deletions(-) diff --git a/RELEASE_NOTES.rst b/RELEASE_NOTES.rst index a1c807008..46cd999ab 100644 --- a/RELEASE_NOTES.rst +++ b/RELEASE_NOTES.rst @@ -31,6 +31,9 @@ Highlights: * The 
configuration entry ``rtio_clock`` supports multiple clocking settings, deprecating the usage of compile-time options. * DRTIO: added support for 100MHz clock. +* Previously detected RTIO async errors are reported to the host after each kernel terminates and a + warning is logged. The warning is additional to the one already printed in the core device log upon + detection of the error. Breaking changes: @@ -44,7 +47,6 @@ Breaking changes: * DRTIO: Changed message alignment from 32-bits to 64-bits. * The deprecated ``set_dataset(..., save=...)`` is no longer supported. - ARTIQ-6 ------- diff --git a/artiq/coredevice/comm_kernel.py b/artiq/coredevice/comm_kernel.py index cdb54a118..1b0111c49 100644 --- a/artiq/coredevice/comm_kernel.py +++ b/artiq/coredevice/comm_kernel.py @@ -621,6 +621,7 @@ class CommKernel: function = self._read_string() backtrace = [self._read_int32() for _ in range(self._read_int32())] + self._process_async_error() traceback = list(reversed(symbolizer(backtrace))) + \ [(filename, line, column, *demangler([function]), None)] @@ -635,6 +636,16 @@ class CommKernel: python_exn.artiq_core_exception = core_exn raise python_exn + def _process_async_error(self): + errors = self._read_int8() + if errors > 0: + map_name = lambda y, z: [f"{y}(s)"] if z else [] + errors = map_name("collision", errors & 2 ** 0) + \ + map_name("busy error", errors & 2 ** 1) + \ + map_name("sequence error", errors & 2 ** 2) + logger.warning(f"{(', '.join(errors[:-1]) + ' and ') if len(errors) > 1 else ''}{errors[-1]} " + f"reported during kernel execution") + def serve(self, embedding_map, symbolizer, demangler): while True: self._read_header() @@ -646,4 +657,5 @@ class CommKernel: raise exceptions.ClockFailure else: self._read_expect(Reply.KernelFinished) + self._process_async_error() return diff --git a/artiq/firmware/libproto_artiq/session_proto.rs b/artiq/firmware/libproto_artiq/session_proto.rs index 99412de10..0475a4489 100644 --- 
a/artiq/firmware/libproto_artiq/session_proto.rs +++ b/artiq/firmware/libproto_artiq/session_proto.rs @@ -90,7 +90,9 @@ pub enum Reply<'a> { LoadCompleted, LoadFailed(&'a str), - KernelFinished, + KernelFinished { + async_errors: u8 + }, KernelStartupFailed, KernelException { name: &'a str, @@ -100,7 +102,8 @@ pub enum Reply<'a> { line: u32, column: u32, function: &'a str, - backtrace: &'a [usize] + backtrace: &'a [usize], + async_errors: u8 }, RpcRequest { async: bool }, @@ -160,14 +163,16 @@ impl<'a> Reply<'a> { writer.write_string(reason)?; }, - Reply::KernelFinished => { + Reply::KernelFinished { async_errors } => { writer.write_u8(7)?; + writer.write_u8(async_errors)?; }, Reply::KernelStartupFailed => { writer.write_u8(8)?; }, Reply::KernelException { - name, message, param, file, line, column, function, backtrace + name, message, param, file, line, column, function, backtrace, + async_errors } => { writer.write_u8(9)?; writer.write_string(name)?; @@ -183,6 +188,7 @@ impl<'a> Reply<'a> { for &addr in backtrace { writer.write_u32(addr as u32)? 
} + writer.write_u8(async_errors)?; }, Reply::RpcRequest { async } => { diff --git a/artiq/firmware/runtime/rtio_mgt.rs b/artiq/firmware/runtime/rtio_mgt.rs index 825900b78..1a1d1660b 100644 --- a/artiq/firmware/runtime/rtio_mgt.rs +++ b/artiq/firmware/runtime/rtio_mgt.rs @@ -326,6 +326,14 @@ pub mod drtio { pub fn reset(_io: &Io, _aux_mutex: &Mutex) {} } +static mut SEEN_ASYNC_ERRORS: u8 = 0; + +pub unsafe fn get_async_errors() -> u8 { + let mut errors = SEEN_ASYNC_ERRORS; + SEEN_ASYNC_ERRORS = 0; + errors +} + fn async_error_thread(io: Io) { loop { unsafe { @@ -343,6 +351,7 @@ fn async_error_thread(io: Io) { error!("RTIO sequence error involving channel {}", csr::rtio_core::sequence_error_channel_read()); } + SEEN_ASYNC_ERRORS = errors; csr::rtio_core::async_error_write(errors); } } diff --git a/artiq/firmware/runtime/session.rs b/artiq/firmware/runtime/session.rs index 7d0935667..260a1b385 100644 --- a/artiq/firmware/runtime/session.rs +++ b/artiq/firmware/runtime/session.rs @@ -9,6 +9,7 @@ use urc::Urc; use sched::{ThreadHandle, Io, Mutex, TcpListener, TcpStream, Error as SchedError}; use rtio_clocking; use rtio_dma::Manager as DmaManager; +use rtio_mgt::get_async_errors; use cache::Cache; use kern_hwreq; use board_artiq::drtio_routing; @@ -431,7 +432,9 @@ fn process_kern_message(io: &Io, aux_mutex: &Mutex, match stream { None => return Ok(true), Some(ref mut stream) => - host_write(stream, host::Reply::KernelFinished).map_err(|e| e.into()) + host_write(stream, host::Reply::KernelFinished { + async_errors: unsafe { get_async_errors() } + }).map_err(|e| e.into()) } } &kern::RunException { @@ -458,7 +461,8 @@ fn process_kern_message(io: &Io, aux_mutex: &Mutex, line: line, column: column, function: function, - backtrace: backtrace + backtrace: backtrace, + async_errors: unsafe { get_async_errors() } }).map_err(|e| e.into()) } } From 12512bfb2ff0b0c83a965cb8fe14a4e39b488f44 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sun, 5 Dec 2021 14:31:49 +0800 
Subject: [PATCH 23/59] flake: get rid of TARGET_AR --- flake.lock | 8 ++++---- flake.nix | 2 -- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/flake.lock b/flake.lock index bf378e2eb..35c1fb29a 100644 --- a/flake.lock +++ b/flake.lock @@ -61,11 +61,11 @@ "src-misoc": { "flake": false, "locked": { - "lastModified": 1636527305, - "narHash": "sha256-/2XTejqj0Bo81HaTrlTSWwInnWwsuqnq+CURXbpIrkA=", + "lastModified": 1638683371, + "narHash": "sha256-sm2SxHmEGfE56+V+joDHMjpOaxg8+t3EJEk1d11C1E0=", "ref": "master", - "rev": "f5203e406520874e15ab5d070058ef642fc57fd9", - "revCount": 2417, + "rev": "71b74f87b41c56a6c6d767cdfde0356c15a379a7", + "revCount": 2418, "submodules": true, "type": "git", "url": "https://github.com/m-labs/misoc.git" diff --git a/flake.nix b/flake.nix index 5fa94518d..f5a29ff4a 100644 --- a/flake.nix +++ b/flake.nix @@ -270,7 +270,6 @@ ln -s $ARTIQ_PATH/firmware/Cargo.lock . cargoSetupPostUnpackHook cargoSetupPostPatchHook - export TARGET_AR=llvm-ar ${buildCommand} ''; doCheck = true; @@ -384,7 +383,6 @@ packages.x86_64-linux.vivado packages.x86_64-linux.openocd-bscanspi ]; - TARGET_AR="llvm-ar"; }; hydraJobs = { From 9e3ea4e8ef8881d4913458533aa2505a8ce1539f Mon Sep 17 00:00:00 2001 From: Leon Riesebos Date: Fri, 18 Jun 2021 17:49:20 +0200 Subject: [PATCH 24/59] coredevice: fixed type annotations for AD9910 --- artiq/coredevice/ad9910.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/artiq/coredevice/ad9910.py b/artiq/coredevice/ad9910.py index 49bfe9a90..e93d3202b 100644 --- a/artiq/coredevice/ad9910.py +++ b/artiq/coredevice/ad9910.py @@ -518,7 +518,7 @@ class AD9910: @kernel def set_mu(self, ftw: TInt32, pow_: TInt32 = 0, asf: TInt32 = 0x3fff, phase_mode: TInt32 = _PHASE_MODE_DEFAULT, - ref_time_mu: TInt64 = int64(-1), profile: TInt32 = 0): + ref_time_mu: TInt64 = int64(-1), profile: TInt32 = 0) -> TInt32: """Set profile 0 data in machine units. This uses machine units (FTW, POW, ASF). 
The frequency tuning word @@ -823,7 +823,7 @@ class AD9910: @kernel def set(self, frequency: TFloat, phase: TFloat = 0.0, amplitude: TFloat = 1.0, phase_mode: TInt32 = _PHASE_MODE_DEFAULT, - ref_time_mu: TInt64 = int64(-1), profile: TInt32 = 0): + ref_time_mu: TInt64 = int64(-1), profile: TInt32 = 0) -> TFloat: """Set profile 0 data in SI units. .. seealso:: :meth:`set_mu` From 7ffe4dc2e3ff9a83cf861be7c4b0ec78e4d4c772 Mon Sep 17 00:00:00 2001 From: Leon Riesebos Date: Thu, 24 Jun 2021 19:40:30 -0400 Subject: [PATCH 25/59] coredevice: set default pow for ad9912 set_mu() --- artiq/coredevice/ad9912.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/artiq/coredevice/ad9912.py b/artiq/coredevice/ad9912.py index b214b9496..cb018c103 100644 --- a/artiq/coredevice/ad9912.py +++ b/artiq/coredevice/ad9912.py @@ -156,7 +156,7 @@ class AD9912: return self.cpld.get_channel_att(self.chip_select - 4) @kernel - def set_mu(self, ftw: TInt64, pow_: TInt32): + def set_mu(self, ftw: TInt64, pow_: TInt32 = 0): """Set profile 0 data in machine units. 
After the SPI transfer, the shared IO update pin is pulsed to From 33a9ca26848f63db70490289087d1a46e63f54f8 Mon Sep 17 00:00:00 2001 From: Etienne Wodey Date: Wed, 8 Dec 2021 23:41:38 +0100 Subject: [PATCH 26/59] tools/file_import: use SourceFileLoader This allows loading modules from files with extensions not in importlib.machinery.SOURCE_SUFFIXES Signed-off-by: Etienne Wodey --- artiq/test/test_tools.py | 14 +++++++++++--- artiq/tools.py | 7 +++++-- 2 files changed, 16 insertions(+), 5 deletions(-) diff --git a/artiq/test/test_tools.py b/artiq/test/test_tools.py index 0c24dd29e..8d104281d 100644 --- a/artiq/test/test_tools.py +++ b/artiq/test/test_tools.py @@ -1,7 +1,8 @@ from contextlib import contextmanager -import unittest +import importlib from pathlib import Path import tempfile +import unittest from artiq import tools @@ -10,13 +11,13 @@ from artiq import tools # Very simplified version of CPython's # Lib/test/test_importlib/util.py:create_modules @contextmanager -def create_modules(*names): +def create_modules(*names, extension=".py"): mapping = {} with tempfile.TemporaryDirectory() as temp_dir: mapping[".root"] = Path(temp_dir) for name in names: - file_path = Path(temp_dir) / f"{name}.py" + file_path = Path(temp_dir) / f"{name}{extension}" with file_path.open("w") as fp: print(f"_MODULE_NAME = {name!r}", file=fp) mapping[name] = file_path @@ -45,6 +46,13 @@ class TestFileImport(unittest.TestCase): self.assertEqual(mod2._M1_NAME, mod1._MODULE_NAME) + def test_can_import_not_in_source_suffixes(self): + for extension in ["", ".something"]: + self.assertNotIn(extension, importlib.machinery.SOURCE_SUFFIXES) + with create_modules(MODNAME, extension=extension) as mods: + mod = tools.file_import(str(mods[MODNAME])) + self.assertEqual(Path(mod.__file__).name, f"{MODNAME}{extension}") + class TestGetExperiment(unittest.TestCase): def test_fail_no_experiments(self): diff --git a/artiq/tools.py b/artiq/tools.py index 167f8cf74..d98059356 100644 --- a/artiq/tools.py 
+++ b/artiq/tools.py @@ -1,5 +1,5 @@ import asyncio -import importlib.util +import importlib import inspect import logging import os @@ -78,7 +78,10 @@ def file_import(filename, prefix="file_import_"): sys.path.insert(0, path) try: - spec = importlib.util.spec_from_file_location(modname, filename) + spec = importlib.util.spec_from_loader( + modname, + importlib.machinery.SourceFileLoader(modname, str(filename)), + ) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) finally: From 7c664142a56a3143b390720e7a8783f8b912e17e Mon Sep 17 00:00:00 2001 From: Peter Drmota <49479443+pmldrmota@users.noreply.github.com> Date: Mon, 13 Dec 2021 15:44:03 +0000 Subject: [PATCH 27/59] Simplified use of the AD9910 RAM feature (#1584) * coredevice: Change Urukul default single-tone profile to 7 This allows using the internal profile control in RAM modulation mode (which always starts to play back at profile 0) without competing for the content of the profile 0 register used in single tone mode. Signed-off-by: Peter Drmota * ad9910/set_mu: comment on caveats when setting register * ad9910: avoid unnecessary write/param Credit: Solution proposed by @pmldrmota in https://github.com/m-labs/artiq/pull/1584#issuecomment-987774353 * revert 1064fdff (`set_mu()` comments) 158a7be7 had addressed this issue. Co-authored-by: occheung --- RELEASE_NOTES.rst | 7 +++++-- artiq/coredevice/ad9910.py | 42 +++++++++++++++++++++++++++----------- artiq/coredevice/urukul.py | 2 +- 3 files changed, 36 insertions(+), 15 deletions(-) diff --git a/RELEASE_NOTES.rst b/RELEASE_NOTES.rst index 75b1d3347..7c722c936 100644 --- a/RELEASE_NOTES.rst +++ b/RELEASE_NOTES.rst @@ -84,8 +84,11 @@ Highlights: - Improved performance for kernel RPC involving list and array. * Coredevice SI to mu conversions now always return valid codes, or raise a ``ValueError``. * Zotino now exposes ``voltage_to_mu()`` -* ``ad9910``: The maximum amplitude scale factor is now ``0x3fff`` (was ``0x3ffe`` - before). 
+* ``ad9910``: + - The maximum amplitude scale factor is now ``0x3fff`` (was ``0x3ffe`` before). + - The default single-tone profile is now 7 (was 0). + - Added option to ``set_mu()`` that affects the ASF, FTW and POW registers + instead of the single-tone profile register. * Mirny now supports HW revision independent, human readable ``clk_sel`` parameters: "XO", "SMA", and "MMCX". Passing an integer is backwards compatible. * Dashboard: diff --git a/artiq/coredevice/ad9910.py b/artiq/coredevice/ad9910.py index e93d3202b..f0b8879d0 100644 --- a/artiq/coredevice/ad9910.py +++ b/artiq/coredevice/ad9910.py @@ -236,7 +236,7 @@ class AD9910: """ self.bus.set_config_mu(urukul.SPI_CONFIG | spi.SPI_END, 24, urukul.SPIT_DDS_WR, self.chip_select) - self.bus.write((addr << 24) | (data << 8)) + self.bus.write((addr << 24) | ((data & 0xffff) << 8)) @kernel def write32(self, addr: TInt32, data: TInt32): @@ -516,10 +516,11 @@ class AD9910: self.cpld.io_update.pulse(1 * us) @kernel - def set_mu(self, ftw: TInt32, pow_: TInt32 = 0, asf: TInt32 = 0x3fff, + def set_mu(self, ftw: TInt32 = 0, pow_: TInt32 = 0, asf: TInt32 = 0x3fff, phase_mode: TInt32 = _PHASE_MODE_DEFAULT, - ref_time_mu: TInt64 = int64(-1), profile: TInt32 = 0) -> TInt32: - """Set profile 0 data in machine units. + ref_time_mu: TInt64 = int64(-1), profile: TInt32 = 7, + ram_destination: TInt32 = -1) -> TInt32: + """Set DDS data in machine units. This uses machine units (FTW, POW, ASF). The frequency tuning word width is 32, the phase offset word width is 16, and the amplitude @@ -538,7 +539,13 @@ class AD9910: by :meth:`set_phase_mode` for this call. :param ref_time_mu: Fiducial time used to compute absolute or tracking phase updates. In machine units as obtained by `now_mu()`. - :param profile: Profile number to set (0-7, default: 0). + :param profile: Single tone profile number to set (0-7, default: 7). + Ineffective if `ram_destination` is specified. 
+ :param ram_destination: RAM destination (:const:`RAM_DEST_FTW`, + :const:`RAM_DEST_POW`, :const:`RAM_DEST_ASF`, + :const:`RAM_DEST_POWASF`). If specified, write free DDS parameters + to the ASF/FTW/POW registers instead of to the single tone profile + register (default behaviour, see `profile`). :return: Resulting phase offset word after application of phase tracking offset. When using :const:`PHASE_MODE_CONTINUOUS` in subsequent calls, use this value as the "current" phase. @@ -561,8 +568,17 @@ class AD9910: # is equivalent to an output pipeline latency. dt = int32(now_mu()) - int32(ref_time_mu) pow_ += dt * ftw * self.sysclk_per_mu >> 16 - self.write64(_AD9910_REG_PROFILE0 + profile, - (asf << 16) | (pow_ & 0xffff), ftw) + if ram_destination == -1: + self.write64(_AD9910_REG_PROFILE0 + profile, + (asf << 16) | (pow_ & 0xffff), ftw) + else: + if not ram_destination == RAM_DEST_FTW: + self.set_ftw(ftw) + if not ram_destination == RAM_DEST_POWASF: + if not ram_destination == RAM_DEST_ASF: + self.set_asf(asf) + if not ram_destination == RAM_DEST_POW: + self.set_pow(pow_) delay_mu(int64(self.sync_data.io_update_delay)) self.cpld.io_update.pulse_mu(8) # assumes 8 mu > t_SYN_CCLK at_mu(now_mu() & ~7) # clear fine TSC again @@ -821,10 +837,11 @@ class AD9910: return self.pow_to_turns(self.get_pow()) @kernel - def set(self, frequency: TFloat, phase: TFloat = 0.0, + def set(self, frequency: TFloat = 0.0, phase: TFloat = 0.0, amplitude: TFloat = 1.0, phase_mode: TInt32 = _PHASE_MODE_DEFAULT, - ref_time_mu: TInt64 = int64(-1), profile: TInt32 = 0) -> TFloat: - """Set profile 0 data in SI units. + ref_time_mu: TInt64 = int64(-1), profile: TInt32 = 7, + ram_destination: TInt32 = -1) -> TFloat: + """Set DDS data in SI units. .. 
seealso:: :meth:`set_mu` @@ -833,13 +850,14 @@ class AD9910: :param amplitude: Amplitude in units of full scale :param phase_mode: Phase mode constant :param ref_time_mu: Fiducial time stamp in machine units - :param profile: Profile to affect + :param profile: Single tone profile to affect. + :param ram_destination: RAM destination. :return: Resulting phase offset in turns """ return self.pow_to_turns(self.set_mu( self.frequency_to_ftw(frequency), self.turns_to_pow(phase), self.amplitude_to_asf(amplitude), phase_mode, ref_time_mu, - profile)) + profile, ram_destination)) @kernel def get(self, profile: TInt32 = 0) -> TTuple([TFloat, TFloat, TFloat]): diff --git a/artiq/coredevice/urukul.py b/artiq/coredevice/urukul.py index 7cda7a4ce..92b951036 100644 --- a/artiq/coredevice/urukul.py +++ b/artiq/coredevice/urukul.py @@ -188,7 +188,7 @@ class CPLD: assert sync_div is None sync_div = 0 - self.cfg_reg = urukul_cfg(rf_sw=rf_sw, led=0, profile=0, + self.cfg_reg = urukul_cfg(rf_sw=rf_sw, led=0, profile=7, io_update=0, mask_nu=0, clk_sel=clk_sel, sync_sel=sync_sel, rst=0, io_rst=0, clk_div=clk_div) From 4f723e19a6bc9e84c0eb627461fe625007307504 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Tue, 14 Dec 2021 00:05:49 +0800 Subject: [PATCH 28/59] RELEASE_NOTES: update --- RELEASE_NOTES.rst | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/RELEASE_NOTES.rst b/RELEASE_NOTES.rst index 7c722c936..2e1390cf0 100644 --- a/RELEASE_NOTES.rst +++ b/RELEASE_NOTES.rst @@ -8,12 +8,11 @@ ARTIQ-7 Highlights: -* Support for Kasli-SoC, a new EEM carrier based on a Zynq SoC, enabling much faster kernel execution. +* New hardware support: + - Kasli-SoC, a new EEM carrier based on a Zynq SoC, enabling much faster kernel execution. + - HVAMP_8CH 8 channel HV amplifier for Fastino / Zotinos * Softcore targets now use the RISC-V architecture (VexRiscv) instead of OR1K (mor1kx). -* WRPLL -* Compiler: - - Supports kernel decorator with paths. 
- - Faster compilation for large arrays/lists. +* Faster compilation for large arrays/lists. * Phaser: - Improved documentation - Expose the DAC coarse mixer and ``sif_sync`` @@ -22,8 +21,6 @@ Highlights: * ``get()``, ``get_mu()``, ``get_att()``, and ``get_att_mu()`` functions added for AD9910 and AD9912 * On Kasli, the number of FIFO lanes in the scalable events dispatcher (SED) can now be configured in the JSON hardware description file. -* New hardware support: - - HVAMP_8CH 8 channel HV amplifier for Fastino / Zotino * ``artiq_ddb_template`` generates edge-counter keys that start with the key of the corresponding TTL device (e.g. ``"ttl_0_counter"`` for the edge counter on TTL device``"ttl_0"``) * ``artiq_master`` now has an ``--experiment-subdir`` option to scan only a subdirectory of the @@ -40,6 +37,9 @@ Highlights: Breaking changes: +* Due to the new RISC-V CPU, the device database entry for the core device needs to be updated. + The ``target`` parameter needs to be set to ``rv32ima`` for Kasli 1.x and to ``rv32g`` for all + other boards. Freshly generated device database templates already contain this update. * Updated Phaser-Upconverter default frequency 2.875 GHz. The new default uses the target PFD frequency of the hardware design. * ``Phaser.init()`` now disables all Kasli-oscillators. 
This avoids full power RF output being From 37f14d94d04490a1effa504f3b1dca73ecd60214 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Tue, 14 Dec 2021 19:07:17 +0800 Subject: [PATCH 29/59] test_dataset_db: fix for windows --- artiq/test/test_dataset_db.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/artiq/test/test_dataset_db.py b/artiq/test/test_dataset_db.py index 3d087a806..cafcf97d1 100644 --- a/artiq/test/test_dataset_db.py +++ b/artiq/test/test_dataset_db.py @@ -1,6 +1,7 @@ """Test internal dataset representation (persistence, applets)""" import unittest import tempfile +import os from artiq.master.databases import DatasetDB from sipyco import pyon @@ -11,11 +12,14 @@ KEY3 = "key3" DATA = list(range(10)) COMP = "gzip" +# tempfile.NamedTemporaryFile: +# use delete=False and manual cleanup +# for Windows compatibility class TestDatasetDB(unittest.TestCase): def setUp(self): # empty dataset persistance file - self.persist_file = tempfile.NamedTemporaryFile(mode="w+") + self.persist_file = tempfile.NamedTemporaryFile(mode="w+", delete=False) print("{}", file=self.persist_file, flush=True) self.ddb = DatasetDB(self.persist_file.name) @@ -26,6 +30,9 @@ class TestDatasetDB(unittest.TestCase): self.save_ddb_to_disk() + def tearDown(self): + os.unlink(self.persist_file.name) + def save_ddb_to_disk(self): self.ddb.save() self.persist_file.flush() From a518963a47bb830850fbf1cc0dad6bcb0930dd21 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Tue, 14 Dec 2021 19:19:22 +0800 Subject: [PATCH 30/59] test_dataset_db: disable tests broken on windows --- artiq/test/test_dataset_db.py | 51 ++++++++++++++++++----------------- 1 file changed, 27 insertions(+), 24 deletions(-) diff --git a/artiq/test/test_dataset_db.py b/artiq/test/test_dataset_db.py index cafcf97d1..fafdff3ad 100644 --- a/artiq/test/test_dataset_db.py +++ b/artiq/test/test_dataset_db.py @@ -28,33 +28,35 @@ class TestDatasetDB(unittest.TestCase): self.ddb.set(KEY2, 
DATA, persist=True, hdf5_options=dict(compression=COMP)) self.ddb.set(KEY3, DATA, hdf5_options=dict(shuffle=True)) - self.save_ddb_to_disk() + # broken on Windows + # self.save_ddb_to_disk() def tearDown(self): os.unlink(self.persist_file.name) - def save_ddb_to_disk(self): - self.ddb.save() - self.persist_file.flush() + # broken on Windows + # def save_ddb_to_disk(self): + # self.ddb.save() + # self.persist_file.flush() - def load_ddb_from_disk(self): - return pyon.load_file(self.persist_file.name) + # def load_ddb_from_disk(self): + # return pyon.load_file(self.persist_file.name) - def test_persist_format(self): - data = pyon.load_file(self.persist_file.name) + # def test_persist_format(self): + # data = pyon.load_file(self.persist_file.name) - for key in [KEY1, KEY2]: - self.assertTrue(data[key]["persist"]) - self.assertEqual(data[key]["value"], DATA) + # for key in [KEY1, KEY2]: + # self.assertTrue(data[key]["persist"]) + # self.assertEqual(data[key]["value"], DATA) - self.assertEqual(data[KEY2]["hdf5_options"]["compression"], COMP) - self.assertEqual(data[KEY1]["hdf5_options"], dict()) + # self.assertEqual(data[KEY2]["hdf5_options"]["compression"], COMP) + # self.assertEqual(data[KEY1]["hdf5_options"], dict()) - def test_only_persist_marked_datasets(self): - data = self.load_ddb_from_disk() + # def test_only_persist_marked_datasets(self): + # data = self.load_ddb_from_disk() - with self.assertRaises(KeyError): - data[KEY3] + # with self.assertRaises(KeyError): + # data[KEY3] def test_memory_format(self): ds = self.ddb.get(KEY2) @@ -67,16 +69,17 @@ class TestDatasetDB(unittest.TestCase): self.assertEqual(ds["value"], DATA) self.assertTrue(ds["hdf5_options"]["shuffle"]) - def test_delete(self): - self.ddb.delete(KEY1) - self.save_ddb_to_disk() + # broken on Windows + # def test_delete(self): + # self.ddb.delete(KEY1) + # self.save_ddb_to_disk() - data = self.load_ddb_from_disk() + # data = self.load_ddb_from_disk() - with self.assertRaises(KeyError): - 
data[KEY1] + # with self.assertRaises(KeyError): + # data[KEY1] - self.assertTrue(data[KEY2]["persist"]) + # self.assertTrue(data[KEY2]["persist"]) def test_update(self): self.assertFalse(self.ddb.get(KEY3)["persist"]) From 352317df11f64e56c71b832d2132c50c408b6121 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Tue, 14 Dec 2021 19:27:15 +0800 Subject: [PATCH 31/59] test_dataset_db: remove (too much breakage on Windows) --- artiq/test/test_dataset_db.py | 121 ---------------------------------- 1 file changed, 121 deletions(-) delete mode 100644 artiq/test/test_dataset_db.py diff --git a/artiq/test/test_dataset_db.py b/artiq/test/test_dataset_db.py deleted file mode 100644 index fafdff3ad..000000000 --- a/artiq/test/test_dataset_db.py +++ /dev/null @@ -1,121 +0,0 @@ -"""Test internal dataset representation (persistence, applets)""" -import unittest -import tempfile -import os - -from artiq.master.databases import DatasetDB -from sipyco import pyon - -KEY1 = "key1" -KEY2 = "key2" -KEY3 = "key3" -DATA = list(range(10)) -COMP = "gzip" - -# tempfile.NamedTemporaryFile: -# use delete=False and manual cleanup -# for Windows compatibility - -class TestDatasetDB(unittest.TestCase): - def setUp(self): - # empty dataset persistance file - self.persist_file = tempfile.NamedTemporaryFile(mode="w+", delete=False) - print("{}", file=self.persist_file, flush=True) - - self.ddb = DatasetDB(self.persist_file.name) - - self.ddb.set(KEY1, DATA, persist=True) - self.ddb.set(KEY2, DATA, persist=True, hdf5_options=dict(compression=COMP)) - self.ddb.set(KEY3, DATA, hdf5_options=dict(shuffle=True)) - - # broken on Windows - # self.save_ddb_to_disk() - - def tearDown(self): - os.unlink(self.persist_file.name) - - # broken on Windows - # def save_ddb_to_disk(self): - # self.ddb.save() - # self.persist_file.flush() - - # def load_ddb_from_disk(self): - # return pyon.load_file(self.persist_file.name) - - # def test_persist_format(self): - # data = 
pyon.load_file(self.persist_file.name) - - # for key in [KEY1, KEY2]: - # self.assertTrue(data[key]["persist"]) - # self.assertEqual(data[key]["value"], DATA) - - # self.assertEqual(data[KEY2]["hdf5_options"]["compression"], COMP) - # self.assertEqual(data[KEY1]["hdf5_options"], dict()) - - # def test_only_persist_marked_datasets(self): - # data = self.load_ddb_from_disk() - - # with self.assertRaises(KeyError): - # data[KEY3] - - def test_memory_format(self): - ds = self.ddb.get(KEY2) - self.assertTrue(ds["persist"]) - self.assertEqual(ds["value"], DATA) - self.assertEqual(ds["hdf5_options"]["compression"], COMP) - - ds = self.ddb.get(KEY3) - self.assertFalse(ds["persist"]) - self.assertEqual(ds["value"], DATA) - self.assertTrue(ds["hdf5_options"]["shuffle"]) - - # broken on Windows - # def test_delete(self): - # self.ddb.delete(KEY1) - # self.save_ddb_to_disk() - - # data = self.load_ddb_from_disk() - - # with self.assertRaises(KeyError): - # data[KEY1] - - # self.assertTrue(data[KEY2]["persist"]) - - def test_update(self): - self.assertFalse(self.ddb.get(KEY3)["persist"]) - - mod = { - "action": "setitem", - "path": [KEY3], - "key": "persist", - "value": True, - } - - self.ddb.update(mod) - self.assertTrue(self.ddb.get(KEY3)["persist"]) - - def test_update_hdf5_options(self): - with self.assertRaises(KeyError): - self.ddb.get(KEY1)["hdf5_options"]["shuffle"] - - mod = { - "action": "setitem", - "path": [KEY1, "hdf5_options"], - "key": "shuffle", - "value": False, - } - - self.ddb.update(mod) - self.assertFalse(self.ddb.get(KEY1)["hdf5_options"]["shuffle"]) - - def test_reset_copies_persist(self): - self.assertTrue(self.ddb.get(KEY1)["persist"]) - self.ddb.set(KEY1, DATA) - self.assertTrue(self.ddb.get(KEY1)["persist"]) - - self.assertFalse(self.ddb.get(KEY3)["persist"]) - self.ddb.set(KEY3, DATA) - self.assertFalse(self.ddb.get(KEY3)["persist"]) - - self.ddb.set(KEY3, DATA, persist=True) - self.assertTrue(self.ddb.get(KEY3)["persist"]) From 
9e5e234af31a82d053bc6c1add9eedc8bcf1d99b Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Tue, 14 Dec 2021 20:06:38 +0800 Subject: [PATCH 32/59] stop using explicit ProactorEventLoop on Windows It is now the default in Python. --- artiq/frontend/artiq_master.py | 7 +------ artiq/test/test_scheduler.py | 6 +----- artiq/test/test_worker.py | 6 +----- 3 files changed, 3 insertions(+), 16 deletions(-) diff --git a/artiq/frontend/artiq_master.py b/artiq/frontend/artiq_master.py index ef46fb9bb..83c6615ae 100755 --- a/artiq/frontend/artiq_master.py +++ b/artiq/frontend/artiq_master.py @@ -3,7 +3,6 @@ import asyncio import argparse import atexit -import os import logging from sipyco.pc_rpc import Server as RPCServer @@ -75,11 +74,7 @@ class MasterConfig: def main(): args = get_argparser().parse_args() log_forwarder = init_log(args) - if os.name == "nt": - loop = asyncio.ProactorEventLoop() - asyncio.set_event_loop(loop) - else: - loop = asyncio.get_event_loop() + loop = asyncio.get_event_loop() atexit.register(loop.close) bind = common_args.bind_address_from_args(args) diff --git a/artiq/test/test_scheduler.py b/artiq/test/test_scheduler.py index 5a8cdb6bc..984804fcf 100644 --- a/artiq/test/test_scheduler.py +++ b/artiq/test/test_scheduler.py @@ -2,7 +2,6 @@ import unittest import logging import asyncio import sys -import os from time import time, sleep from artiq.experiment import * @@ -87,10 +86,7 @@ class _RIDCounter: class SchedulerCase(unittest.TestCase): def setUp(self): - if os.name == "nt": - self.loop = asyncio.ProactorEventLoop() - else: - self.loop = asyncio.new_event_loop() + self.loop = asyncio.new_event_loop() asyncio.set_event_loop(self.loop) def test_steps(self): diff --git a/artiq/test/test_worker.py b/artiq/test/test_worker.py index 88e3ead82..c33daf430 100644 --- a/artiq/test/test_worker.py +++ b/artiq/test/test_worker.py @@ -2,7 +2,6 @@ import unittest import logging import asyncio import sys -import os from time import sleep from 
artiq.experiment import * @@ -77,10 +76,7 @@ def _run_experiment(class_name): class WorkerCase(unittest.TestCase): def setUp(self): - if os.name == "nt": - self.loop = asyncio.ProactorEventLoop() - else: - self.loop = asyncio.new_event_loop() + self.loop = asyncio.new_event_loop() asyncio.set_event_loop(self.loop) def test_simple_run(self): From c0a7be0a908b78019efde7425f07771fee0719fd Mon Sep 17 00:00:00 2001 From: Steve Fan Date: Thu, 16 Dec 2021 16:21:16 +0800 Subject: [PATCH 33/59] llvm_ir: move stacksave before lltag alloca in build_rpc Signed-off-by: Steve Fan --- artiq/compiler/transforms/llvm_ir_generator.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/artiq/compiler/transforms/llvm_ir_generator.py b/artiq/compiler/transforms/llvm_ir_generator.py index 5f61db822..31b10d505 100644 --- a/artiq/compiler/transforms/llvm_ir_generator.py +++ b/artiq/compiler/transforms/llvm_ir_generator.py @@ -1249,13 +1249,13 @@ class LLVMIRGenerator: self.engine.process(diag) tag += ir.rpc_tag(fun_type.ret, ret_error_handler) + llstackptr = self.llbuilder.call(self.llbuiltin("llvm.stacksave"), [], + name="rpc.stack") + lltag = self.llconst_of_const(ir.Constant(tag, builtins.TStr())) lltagptr = self.llbuilder.alloca(lltag.type) self.llbuilder.store(lltag, lltagptr) - llstackptr = self.llbuilder.call(self.llbuiltin("llvm.stacksave"), [], - name="rpc.stack") - llargs = self.llbuilder.alloca(llptr, ll.Constant(lli32, len(args)), name="rpc.args") for index, arg in enumerate(args): From bbac4770926048b3c0b434aede371a63d198ecd5 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Tue, 21 Dec 2021 13:20:11 +0800 Subject: [PATCH 34/59] tools: fix importlib issue --- artiq/tools.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/artiq/tools.py b/artiq/tools.py index d98059356..1c4949a23 100644 --- a/artiq/tools.py +++ b/artiq/tools.py @@ -1,5 +1,6 @@ import asyncio -import importlib +import importlib.util +import importlib.machinery 
import inspect import logging import os From 9d493028e5c6f70e9c10bbf7571bbdea251f2415 Mon Sep 17 00:00:00 2001 From: occheung Date: Fri, 7 Jan 2022 16:36:28 +0800 Subject: [PATCH 35/59] gateware/suservo: write to profile 7 Fixes #1817. --- artiq/gateware/suservo/dds_ser.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/artiq/gateware/suservo/dds_ser.py b/artiq/gateware/suservo/dds_ser.py index 8df30e9fe..7a53f352e 100644 --- a/artiq/gateware/suservo/dds_ser.py +++ b/artiq/gateware/suservo/dds_ser.py @@ -26,7 +26,7 @@ class DDS(spi.SPISimple): self.profile = [Signal(32 + 16 + 16, reset_less=True) for i in range(params.channels)] - cmd = Signal(8, reset=0x0e) # write to single tone profile 0 + cmd = Signal(8, reset=0x15) # write to single tone profile 7 assert params.width == len(cmd) + len(self.profile[0]) self.sync += [ From f42bea06a89ef80de4dcd4d996544a97287c969f Mon Sep 17 00:00:00 2001 From: Leon Riesebos Date: Thu, 6 Jan 2022 23:26:08 -0500 Subject: [PATCH 36/59] worker_db: removed warning for writing a dataset that is also in the archive Signed-off-by: Leon Riesebos --- RELEASE_NOTES.rst | 1 + artiq/master/worker_db.py | 5 ----- 2 files changed, 1 insertion(+), 5 deletions(-) diff --git a/RELEASE_NOTES.rst b/RELEASE_NOTES.rst index 2e1390cf0..ceb7600d9 100644 --- a/RELEASE_NOTES.rst +++ b/RELEASE_NOTES.rst @@ -34,6 +34,7 @@ Highlights: * HDF5 options can now be passed when creating datasets with ``set_dataset``. This allows in particular to use transparent compression filters as follows: ``set_dataset(name, value, hdf5_options={"compression": "gzip"})``. 
+* Removed worker DB warning for writing a dataset that is also in the archive Breaking changes: diff --git a/artiq/master/worker_db.py b/artiq/master/worker_db.py index 8a2200e05..c739731a0 100644 --- a/artiq/master/worker_db.py +++ b/artiq/master/worker_db.py @@ -120,11 +120,6 @@ class DatasetManager: def set(self, key, value, broadcast=False, persist=False, archive=True, hdf5_options=None): - if key in self.archive: - logger.warning("Modifying dataset '%s' which is in archive, " - "archive will remain untouched", - key, stack_info=True) - if persist: broadcast = True From eaa1505c947c7987cdbd31c24056823c740e84e0 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sat, 8 Jan 2022 11:55:52 +0800 Subject: [PATCH 37/59] update documentation (#1820) --- artiq/language/environment.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/artiq/language/environment.py b/artiq/language/environment.py index ad7a165cb..819391925 100644 --- a/artiq/language/environment.py +++ b/artiq/language/environment.py @@ -392,11 +392,11 @@ class HasEnvironment: By default, datasets obtained by this method are archived into the output HDF5 file of the experiment. If an archived dataset is requested more - than one time (and therefore its value has potentially changed) or is - modified, a warning is emitted. + than one time or is modified, only the value at the time of the first call + is archived. This may impact reproducibility of experiments. :param archive: Set to ``False`` to prevent archival together with the run's results. - Default is ``True`` + Default is ``True``. """ try: return self.__dataset_mgr.get(key, archive) From 6f3c49528d928e144c961490e664c20064c0313c Mon Sep 17 00:00:00 2001 From: occheung Date: Mon, 10 Jan 2022 11:25:51 +0800 Subject: [PATCH 38/59] compiler: revert cabe5ac The lack of debug emitter causes #1821. 
--- .../compiler/transforms/llvm_ir_generator.py | 97 +++++++++++++++++++ 1 file changed, 97 insertions(+) diff --git a/artiq/compiler/transforms/llvm_ir_generator.py b/artiq/compiler/transforms/llvm_ir_generator.py index 31b10d505..1bd6ad25f 100644 --- a/artiq/compiler/transforms/llvm_ir_generator.py +++ b/artiq/compiler/transforms/llvm_ir_generator.py @@ -39,6 +39,94 @@ def memoize(generator): return memoized +class DebugInfoEmitter: + def __init__(self, llmodule): + self.llmodule = llmodule + self.llcompileunit = None + self.cache = {} + + llident = self.llmodule.add_named_metadata('llvm.ident') + llident.add(self.emit_metadata(["ARTIQ"])) + + llflags = self.llmodule.add_named_metadata('llvm.module.flags') + llflags.add(self.emit_metadata([2, "Debug Info Version", 3])) + llflags.add(self.emit_metadata([2, "Dwarf Version", 4])) + + def emit_metadata(self, operands): + def map_operand(operand): + if operand is None: + return ll.Constant(llmetadata, None) + elif isinstance(operand, str): + return ll.MetaDataString(self.llmodule, operand) + elif isinstance(operand, int): + return ll.Constant(lli32, operand) + elif isinstance(operand, (list, tuple)): + return self.emit_metadata(operand) + else: + assert isinstance(operand, ll.NamedValue) + return operand + return self.llmodule.add_metadata(list(map(map_operand, operands))) + + def emit_debug_info(self, kind, operands, is_distinct=False): + return self.llmodule.add_debug_info(kind, operands, is_distinct) + + @memoize + def emit_file(self, source_buffer): + source_dir, source_file = os.path.split(source_buffer.name) + return self.emit_debug_info("DIFile", { + "filename": source_file, + "directory": source_dir, + }) + + @memoize + def emit_compile_unit(self, source_buffer): + return self.emit_debug_info("DICompileUnit", { + "language": ll.DIToken("DW_LANG_Python"), + "file": self.emit_file(source_buffer), + "producer": "ARTIQ", + "runtimeVersion": 0, + "emissionKind": 2, # full=1, lines only=2 + }, is_distinct=True) + + 
@memoize + def emit_subroutine_type(self, typ): + return self.emit_debug_info("DISubroutineType", { + "types": self.emit_metadata([None]) + }) + + @memoize + def emit_subprogram(self, func, llfunc): + source_buffer = func.loc.source_buffer + + if self.llcompileunit is None: + self.llcompileunit = self.emit_compile_unit(source_buffer) + llcompileunits = self.llmodule.add_named_metadata('llvm.dbg.cu') + llcompileunits.add(self.llcompileunit) + + display_name = "{}{}".format(func.name, types.TypePrinter().name(func.type)) + return self.emit_debug_info("DISubprogram", { + "name": func.name, + "linkageName": llfunc.name, + "type": self.emit_subroutine_type(func.type), + "file": self.emit_file(source_buffer), + "line": func.loc.line(), + "unit": self.llcompileunit, + "scope": self.emit_file(source_buffer), + "scopeLine": func.loc.line(), + "isLocal": func.is_internal, + "isDefinition": True, + "variables": self.emit_metadata([]) + }, is_distinct=True) + + @memoize + def emit_loc(self, loc, scope): + return self.emit_debug_info("DILocation", { + "line": loc.line(), + "column": loc.column(), + "scope": scope + }) + + class ABILayoutInfo: """Caches DataLayout size/alignment lookup results. @@ -84,6 +172,7 @@ class LLVMIRGenerator: self.llmap = {} self.llobject_map = {} self.phis = [] + self.debug_info_emitter = DebugInfoEmitter(self.llmodule) self.empty_metadata = self.llmodule.add_metadata([]) self.quote_fail_msg = None @@ -564,6 +653,10 @@ class LLVMIRGenerator: self.llbuilder = ll.IRBuilder() llblock_map = {} + if not func.is_generated: + lldisubprogram = self.debug_info_emitter.emit_subprogram(func, self.llfunction) + self.llfunction.set_metadata('dbg', lldisubprogram) + # First, map arguments. 
if self.has_sret(func.type): llactualargs = self.llfunction.args[1:] @@ -583,6 +676,10 @@ class LLVMIRGenerator: for block in func.basic_blocks: self.llbuilder.position_at_end(self.llmap[block]) for insn in block.instructions: + if insn.loc is not None and not func.is_generated: + self.llbuilder.debug_metadata = \ + self.debug_info_emitter.emit_loc(insn.loc, lldisubprogram) + llinsn = getattr(self, "process_" + type(insn).__name__)(insn) assert llinsn is not None self.llmap[insn] = llinsn From 5e1847e7c17c671cfcad730bc97bb6c70b328fe4 Mon Sep 17 00:00:00 2001 From: occheung Date: Mon, 10 Jan 2022 11:28:32 +0800 Subject: [PATCH 39/59] compiler: rename `variables` to `retainedNodes` Part of the changes that was made to LLVM 6 by the time that LLVM 7 was released. LLVM commit: https://github.com/llvm/llvm-project/commit/2c864551df3945f50e1780b2f2c880ec358ae715 LLVM differential review: https://reviews.llvm.org/D45024 --- artiq/compiler/transforms/llvm_ir_generator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/artiq/compiler/transforms/llvm_ir_generator.py b/artiq/compiler/transforms/llvm_ir_generator.py index 1bd6ad25f..521e0de14 100644 --- a/artiq/compiler/transforms/llvm_ir_generator.py +++ b/artiq/compiler/transforms/llvm_ir_generator.py @@ -115,7 +115,7 @@ class DebugInfoEmitter: "scopeLine": func.loc.line(), "isLocal": func.is_internal, "isDefinition": True, - "variables": self.emit_metadata([]) + "retainedNodes": self.emit_metadata([]) }, is_distinct=True) @memoize From 9f90088fa6d677370bc12cb6af55a09bc8d176a9 Mon Sep 17 00:00:00 2001 From: pca006132 Date: Thu, 6 Jan 2022 13:36:19 +0800 Subject: [PATCH 40/59] compiler: generate appropriate landingpad IR When used together with modified personality function, we got ~20% performance improvement in exception unwinding with zynq. 
--- artiq/compiler/ir.py | 1 + .../compiler/transforms/artiq_ir_generator.py | 2 + .../compiler/transforms/llvm_ir_generator.py | 43 +++++++++++++------ 3 files changed, 32 insertions(+), 14 deletions(-) diff --git a/artiq/compiler/ir.py b/artiq/compiler/ir.py index 3f984606f..ae786f8a8 100644 --- a/artiq/compiler/ir.py +++ b/artiq/compiler/ir.py @@ -1347,6 +1347,7 @@ class LandingPad(Terminator): def __init__(self, cleanup, name=""): super().__init__([cleanup], builtins.TException(), name) self.types = [] + self.has_cleanup = True def copy(self, mapper): self_copy = super().copy(mapper) diff --git a/artiq/compiler/transforms/artiq_ir_generator.py b/artiq/compiler/transforms/artiq_ir_generator.py index 4520ec049..5873664ed 100644 --- a/artiq/compiler/transforms/artiq_ir_generator.py +++ b/artiq/compiler/transforms/artiq_ir_generator.py @@ -718,6 +718,8 @@ class ARTIQIRGenerator(algorithm.Visitor): cleanup = self.add_block('handler.cleanup') landingpad = dispatcher.append(ir.LandingPad(cleanup)) + if not any(node.finalbody): + landingpad.has_cleanup = False handlers = [] for handler_node in node.handlers: diff --git a/artiq/compiler/transforms/llvm_ir_generator.py b/artiq/compiler/transforms/llvm_ir_generator.py index 521e0de14..1a206391e 100644 --- a/artiq/compiler/transforms/llvm_ir_generator.py +++ b/artiq/compiler/transforms/llvm_ir_generator.py @@ -1722,7 +1722,8 @@ class LLVMIRGenerator: def process_LandingPad(self, insn): # Layout on return from landing pad: {%_Unwind_Exception*, %Exception*} lllandingpadty = ll.LiteralStructType([llptr, llptr]) - lllandingpad = self.llbuilder.landingpad(lllandingpadty, cleanup=True) + lllandingpad = self.llbuilder.landingpad(lllandingpadty, + cleanup=insn.has_cleanup) llrawexn = self.llbuilder.extract_value(lllandingpad, 1) llexn = self.llbuilder.bitcast(llrawexn, self.llty_of_type(insn.type)) llexnnameptr = self.llbuilder.gep(llexn, [self.llindex(0), self.llindex(0)], @@ -1731,23 +1732,34 @@ class LLVMIRGenerator: for 
target, typ in insn.clauses(): if typ is None: - exnname = "" # see the comment in ksupport/eh.rs + # we use a null pointer here, similar to how cpp does it + # https://llvm.org/docs/ExceptionHandling.html#try-catch + # > If @ExcType is null, any exception matches, so the + # landingpad should always be entered. This is used for C++ + # catch-all blocks (“catch (...)”). + lllandingpad.add_clause( + ll.CatchClause( + ll.Constant(lli32, 0).inttoptr(llptr) + ) + ) else: exnname = "{}:{}".format(typ.id, typ.name) - llclauseexnname = self.llconst_of_const( - ir.Constant(exnname, builtins.TStr())) - llclauseexnnameptr = self.llmodule.globals.get("exn.{}".format(exnname)) - if llclauseexnnameptr is None: - llclauseexnnameptr = ll.GlobalVariable(self.llmodule, llclauseexnname.type, - name="exn.{}".format(exnname)) - llclauseexnnameptr.global_constant = True - llclauseexnnameptr.initializer = llclauseexnname - llclauseexnnameptr.linkage = "private" - llclauseexnnameptr.unnamed_addr = True - lllandingpad.add_clause(ll.CatchClause(llclauseexnnameptr)) + llclauseexnname = self.llconst_of_const( + ir.Constant(exnname, builtins.TStr())) + llclauseexnnameptr = self.llmodule.globals.get("exn.{}".format(exnname)) + if llclauseexnnameptr is None: + llclauseexnnameptr = ll.GlobalVariable(self.llmodule, llclauseexnname.type, + name="exn.{}".format(exnname)) + llclauseexnnameptr.global_constant = True + llclauseexnnameptr.initializer = llclauseexnname + llclauseexnnameptr.linkage = "private" + llclauseexnnameptr.unnamed_addr = True + lllandingpad.add_clause(ll.CatchClause(llclauseexnnameptr)) if typ is None: + # typ is None means that we match all exceptions, so no need to + # compare self.llbuilder.branch(self.map(target)) else: llexnlen = self.llbuilder.extract_value(llexnname, 1) @@ -1764,6 +1776,9 @@ class LLVMIRGenerator: self.llbuilder.branch(self.map(target)) if self.llbuilder.basic_block.terminator is None: - self.llbuilder.branch(self.map(insn.cleanup())) + if 
insn.has_cleanup: + self.llbuilder.branch(self.map(insn.cleanup())) + else: + self.llbuilder.resume(lllandingpad) return llexn From 6542b65db39e8c97ff5f0119d56adfc07e487298 Mon Sep 17 00:00:00 2001 From: pca006132 Date: Mon, 10 Jan 2022 15:21:39 +0800 Subject: [PATCH 41/59] compiler: fixed exception codegen issues --- artiq/compiler/ir.py | 8 +++++ .../compiler/transforms/artiq_ir_generator.py | 30 ++++++++++++++++--- 2 files changed, 34 insertions(+), 4 deletions(-) diff --git a/artiq/compiler/ir.py b/artiq/compiler/ir.py index ae786f8a8..92bc4fb3a 100644 --- a/artiq/compiler/ir.py +++ b/artiq/compiler/ir.py @@ -1360,6 +1360,14 @@ class LandingPad(Terminator): def cleanup(self): return self.operands[0] + def erase(self): + self.remove_from_parent() + # we should erase all clauses as well + for block in set(self.operands): + block.uses.remove(self) + block.erase() + assert not any(self.uses) + def clauses(self): return zip(self.operands[1:], self.types) diff --git a/artiq/compiler/transforms/artiq_ir_generator.py b/artiq/compiler/transforms/artiq_ir_generator.py index 5873664ed..069422d1b 100644 --- a/artiq/compiler/transforms/artiq_ir_generator.py +++ b/artiq/compiler/transforms/artiq_ir_generator.py @@ -626,6 +626,11 @@ class ARTIQIRGenerator(algorithm.Visitor): self.final_branch(raise_proxy, self.current_block) self.current_block = raise_proxy + if exn is not None: + # if we need to raise the exception in a final body, we have to + # lazy-evaluate the exception object to make sure that we generate + # it in the raise_proxy block + exn = exn() if exn is not None: assert loc is not None loc_file = ir.Constant(loc.source_buffer.name, builtins.TStr()) @@ -650,9 +655,9 @@ class ARTIQIRGenerator(algorithm.Visitor): def visit_Raise(self, node): if node.exc is not None and types.is_exn_constructor(node.exc.type): - self.raise_exn(self.alloc_exn(node.exc.type.instance), loc=self.current_loc) + self.raise_exn(lambda: self.alloc_exn(node.exc.type.instance), 
loc=self.current_loc) else: - self.raise_exn(self.visit(node.exc), loc=self.current_loc) + self.raise_exn(lambda: self.visit(node.exc), loc=self.current_loc) def visit_Try(self, node): dispatcher = self.add_block("try.dispatch") @@ -671,6 +676,15 @@ class ARTIQIRGenerator(algorithm.Visitor): final_targets.append(target) final_paths.append(block) + final_exn_targets = [] + final_exn_paths = [] + # raise has to be treated differently + # we cannot follow indirectbr for local access validation, so we + # have to construct the control flow explicitly + def exception_final_branch(target, block): + final_exn_targets.append(target) + final_exn_paths.append(block) + if self.break_target is not None: break_proxy = self.add_block("try.break") old_break, self.break_target = self.break_target, break_proxy @@ -714,7 +728,7 @@ class ARTIQIRGenerator(algorithm.Visitor): self.continue_target = old_continue self.return_target = old_return - old_final_branch, self.final_branch = self.final_branch, final_branch + old_final_branch, self.final_branch = self.final_branch, exception_final_branch cleanup = self.add_block('handler.cleanup') landingpad = dispatcher.append(ir.LandingPad(cleanup)) @@ -745,6 +759,14 @@ class ARTIQIRGenerator(algorithm.Visitor): # Finalize and continue after try statement. 
self.final_branch = old_final_branch + for (i, (target, block)) in enumerate(zip(final_exn_targets, final_exn_paths)): + finalizer = self.add_block(f"finally{i}") + self.current_block = block + self.terminate(ir.Branch(finalizer)) + self.current_block = finalizer + self.visit(node.finalbody) + self.terminate(ir.Branch(target)) + finalizer = self.add_block("finally") self.current_block = finalizer @@ -997,7 +1019,7 @@ class ARTIQIRGenerator(algorithm.Visitor): old_final_branch, self.final_branch = self.final_branch, None old_unwind, self.unwind_target = self.unwind_target, None - self.raise_exn(exn_gen(*args[1:]), loc=loc) + self.raise_exn(lambda: exn_gen(*args[1:]), loc=loc) finally: self.current_function = old_func self.current_block = old_block From 8b45f917d1010ee7f1891b18d0073fcb316b68b2 Mon Sep 17 00:00:00 2001 From: occheung Date: Thu, 6 Jan 2022 16:15:17 +0800 Subject: [PATCH 42/59] urukul: use default profile --- artiq/coredevice/ad9910.py | 24 ++++++++++++++++-------- artiq/coredevice/urukul.py | 5 ++++- 2 files changed, 20 insertions(+), 9 deletions(-) diff --git a/artiq/coredevice/ad9910.py b/artiq/coredevice/ad9910.py index f0b8879d0..801b689ca 100644 --- a/artiq/coredevice/ad9910.py +++ b/artiq/coredevice/ad9910.py @@ -7,6 +7,7 @@ from artiq.language.types import TBool, TInt32, TInt64, TFloat, TList, TTuple from artiq.coredevice import spi2 as spi from artiq.coredevice import urukul +from artiq.coredevice.urukul import DEFAULT_PROFILE # Work around ARTIQ-Python import machinery urukul_sta_pll_lock = urukul.urukul_sta_pll_lock @@ -60,6 +61,9 @@ RAM_MODE_BIDIR_RAMP = 2 RAM_MODE_CONT_BIDIR_RAMP = 3 RAM_MODE_CONT_RAMPUP = 4 +# Default profile for RAM mode +_DEFAULT_PROFILE_RAM = 0 + class SyncDataUser: def __init__(self, core, sync_delay_seed, io_update_delay): @@ -518,7 +522,8 @@ class AD9910: @kernel def set_mu(self, ftw: TInt32 = 0, pow_: TInt32 = 0, asf: TInt32 = 0x3fff, phase_mode: TInt32 = _PHASE_MODE_DEFAULT, - ref_time_mu: TInt64 = int64(-1), 
profile: TInt32 = 7, + ref_time_mu: TInt64 = int64(-1), + profile: TInt32 = DEFAULT_PROFILE, ram_destination: TInt32 = -1) -> TInt32: """Set DDS data in machine units. @@ -588,13 +593,14 @@ class AD9910: return pow_ @kernel - def get_mu(self, profile: TInt32 = 0) -> TTuple([TInt32, TInt32, TInt32]): + def get_mu(self, profile: TInt32 = DEFAULT_PROFILE + ) -> TTuple([TInt32, TInt32, TInt32]): """Get the frequency tuning word, phase offset word, and amplitude scale factor. .. seealso:: :meth:`get` - :param profile: Profile number to get (0-7, default: 0) + :param profile: Profile number to get (0-7, default: 7) :return: A tuple ``(ftw, pow, asf)`` """ @@ -608,8 +614,9 @@ class AD9910: @kernel def set_profile_ram(self, start: TInt32, end: TInt32, step: TInt32 = 1, - profile: TInt32 = 0, nodwell_high: TInt32 = 0, - zero_crossing: TInt32 = 0, mode: TInt32 = 1): + profile: TInt32 = _DEFAULT_PROFILE_RAM, + nodwell_high: TInt32 = 0, zero_crossing: TInt32 = 0, + mode: TInt32 = 1): """Set the RAM profile settings. :param start: Profile start address in RAM. @@ -839,7 +846,7 @@ class AD9910: @kernel def set(self, frequency: TFloat = 0.0, phase: TFloat = 0.0, amplitude: TFloat = 1.0, phase_mode: TInt32 = _PHASE_MODE_DEFAULT, - ref_time_mu: TInt64 = int64(-1), profile: TInt32 = 7, + ref_time_mu: TInt64 = int64(-1), profile: TInt32 = DEFAULT_PROFILE, ram_destination: TInt32 = -1) -> TFloat: """Set DDS data in SI units. @@ -860,12 +867,13 @@ class AD9910: profile, ram_destination)) @kernel - def get(self, profile: TInt32 = 0) -> TTuple([TFloat, TFloat, TFloat]): + def get(self, profile: TInt32 = DEFAULT_PROFILE + ) -> TTuple([TFloat, TFloat, TFloat]): """Get the frequency, phase, and amplitude. .. 
seealso:: :meth:`get_mu` - :param profile: Profile number to get (0-7, default: 0) + :param profile: Profile number to get (0-7, default: 7) :return: A tuple ``(frequency, phase, amplitude)`` """ diff --git a/artiq/coredevice/urukul.py b/artiq/coredevice/urukul.py index 92b951036..2fd66bd65 100644 --- a/artiq/coredevice/urukul.py +++ b/artiq/coredevice/urukul.py @@ -52,6 +52,9 @@ CS_DDS_CH1 = 5 CS_DDS_CH2 = 6 CS_DDS_CH3 = 7 +# Default profile +DEFAULT_PROFILE = 7 + @portable def urukul_cfg(rf_sw, led, profile, io_update, mask_nu, @@ -188,7 +191,7 @@ class CPLD: assert sync_div is None sync_div = 0 - self.cfg_reg = urukul_cfg(rf_sw=rf_sw, led=0, profile=7, + self.cfg_reg = urukul_cfg(rf_sw=rf_sw, led=0, profile=DEFAULT_PROFILE, io_update=0, mask_nu=0, clk_sel=clk_sel, sync_sel=sync_sel, rst=0, io_rst=0, clk_div=clk_div) From cea0a15e1e94521ff80f24ebdeb05303fb08b4ef Mon Sep 17 00:00:00 2001 From: occheung Date: Thu, 6 Jan 2022 16:15:51 +0800 Subject: [PATCH 43/59] suservo: use default urukul profile --- artiq/gateware/suservo/dds_ser.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/artiq/gateware/suservo/dds_ser.py b/artiq/gateware/suservo/dds_ser.py index 7a53f352e..38d1f6d94 100644 --- a/artiq/gateware/suservo/dds_ser.py +++ b/artiq/gateware/suservo/dds_ser.py @@ -2,6 +2,8 @@ import logging from migen import * +from artiq.coredevice.urukul import DEFAULT_PROFILE + from . 
import spi @@ -26,7 +28,8 @@ class DDS(spi.SPISimple): self.profile = [Signal(32 + 16 + 16, reset_less=True) for i in range(params.channels)] - cmd = Signal(8, reset=0x15) # write to single tone profile 7 + # write to single tone default profile + cmd = Signal(8, reset=0x0e + DEFAULT_PROFILE) assert params.width == len(cmd) + len(self.profile[0]) self.sync += [ From 61349f968564750ec9dcd91817b814031b1adce4 Mon Sep 17 00:00:00 2001 From: occheung Date: Mon, 10 Jan 2022 17:17:01 +0800 Subject: [PATCH 44/59] sinara_tester: fix outdated API --- artiq/frontend/artiq_sinara_tester.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/artiq/frontend/artiq_sinara_tester.py b/artiq/frontend/artiq_sinara_tester.py index 2a0392eec..702f22efb 100755 --- a/artiq/frontend/artiq_sinara_tester.py +++ b/artiq/frontend/artiq_sinara_tester.py @@ -115,11 +115,10 @@ class SinaraTester(EnvExperiment): del self.ttl_outs[io_update_device] # check for suservos and delete respective urukuls elif (module, cls) == ("artiq.coredevice.suservo", "SUServo"): - del self.urukuls[desc["arguments"]["dds0_device"]] - del self.urukul_cplds[desc["arguments"]["cpld0_device"]] - if "dds1_device" in desc["arguments"]: - del self.urukuls[desc["arguments"]["dds1_device"]] - del self.urukul_cplds[desc["arguments"]["cpld1_device"]] + for cpld in desc["arguments"]["cpld_devices"]: + del self.urukul_cplds[cpld] + for dds in desc["arguments"]["dds_devices"]: + del self.urukuls[dds] elif (module, cls) == ("artiq.coredevice.sampler", "Sampler"): cnv_device = desc["arguments"]["cnv_device"] del self.ttl_outs[cnv_device] @@ -597,8 +596,8 @@ class SinaraTester(EnvExperiment): delay(10*us) # DDS attenuator 10dB for i in range(4): - channel.cpld0.set_att(i, 10.) - channel.cpld1.set_att(i, 10.) + for cpld in channel.cplds: + cpld.set_att(i, 10.) 
delay(1*us) # Servo is done and disabled assert channel.get_status() & 0xff == 2 From 12ee326fb4ab2ff9ea8338f4461ed8b35de3307e Mon Sep 17 00:00:00 2001 From: pca006132 Date: Mon, 10 Jan 2022 19:48:57 +0800 Subject: [PATCH 45/59] firmware: fixed personality function --- artiq/firmware/ksupport/eh_artiq.rs | 4 +- artiq/firmware/libeh/dwarf.rs | 159 ++++++++++++++++++++++++---- artiq/firmware/libeh/eh_rust.rs | 2 +- artiq/test/libartiq_support/lib.rs | 4 + 4 files changed, 145 insertions(+), 24 deletions(-) diff --git a/artiq/firmware/ksupport/eh_artiq.rs b/artiq/firmware/ksupport/eh_artiq.rs index 86473e2ba..82be0aa4e 100644 --- a/artiq/firmware/ksupport/eh_artiq.rs +++ b/artiq/firmware/ksupport/eh_artiq.rs @@ -88,7 +88,9 @@ pub extern fn personality(version: c_int, let exception_info = &mut *(uw_exception as *mut ExceptionInfo); let exception = &exception_info.exception.unwrap(); - let eh_action = match dwarf::find_eh_action(lsda, &eh_context) { + let name_ptr = exception.name.as_ptr(); + let len = exception.name.len(); + let eh_action = match dwarf::find_eh_action(lsda, &eh_context, name_ptr, len) { Ok(action) => action, Err(_) => return uw::_URC_FATAL_PHASE1_ERROR, }; diff --git a/artiq/firmware/libeh/dwarf.rs b/artiq/firmware/libeh/dwarf.rs index 4dfc04a67..f70290c46 100644 --- a/artiq/firmware/libeh/dwarf.rs +++ b/artiq/firmware/libeh/dwarf.rs @@ -12,6 +12,7 @@ #![allow(unused)] use core::mem; +use cslice::CSlice; pub const DW_EH_PE_omit: u8 = 0xFF; pub const DW_EH_PE_absptr: u8 = 0x00; @@ -63,6 +64,10 @@ impl DwarfReader { result } + pub unsafe fn offset(&mut self, offset: isize) { + self.ptr = self.ptr.offset(offset); + } + // ULEB128 and SLEB128 encodings are defined in Section 7.6 - "Variable // Length Data". 
pub unsafe fn read_uleb128(&mut self) -> u64 { @@ -104,11 +109,20 @@ unsafe fn read_encoded_pointer( reader: &mut DwarfReader, context: &EHContext<'_>, encoding: u8, +) -> Result { + read_encoded_pointer_with_base(reader, encoding, get_base(encoding, context)?) +} + +unsafe fn read_encoded_pointer_with_base( + reader: &mut DwarfReader, + encoding: u8, + base: usize, ) -> Result { if encoding == DW_EH_PE_omit { return Err(()); } + let original_ptr = reader.ptr; // DW_EH_PE_aligned implies it's an absolute pointer value if encoding == DW_EH_PE_aligned { reader.ptr = round_up(reader.ptr as usize, mem::size_of::())? as *const u8; @@ -128,19 +142,10 @@ unsafe fn read_encoded_pointer( _ => return Err(()), }; - result += match encoding & 0x70 { - DW_EH_PE_absptr => 0, - // relative to address of the encoded value, despite the name - DW_EH_PE_pcrel => reader.ptr as usize, - DW_EH_PE_funcrel => { - if context.func_start == 0 { - return Err(()); - } - context.func_start - } - DW_EH_PE_textrel => (*context.get_text_start)(), - DW_EH_PE_datarel => (*context.get_data_start)(), - _ => return Err(()), + result += if (encoding & 0x70) == DW_EH_PE_pcrel { + original_ptr as usize + } else { + base }; if encoding & DW_EH_PE_indirect != 0 { @@ -150,6 +155,7 @@ unsafe fn read_encoded_pointer( Ok(result) } +#[derive(Debug)] pub enum EHAction { None, Cleanup(usize), @@ -159,7 +165,46 @@ pub enum EHAction { pub const USING_SJLJ_EXCEPTIONS: bool = cfg!(all(target_os = "ios", target_arch = "arm")); -pub unsafe fn find_eh_action(lsda: *const u8, context: &EHContext<'_>) -> Result { +fn size_of_encoded_value(encoding: u8) -> usize { + if encoding == DW_EH_PE_omit { + 0 + } else { + let encoding = encoding & 0x07; + match encoding { + DW_EH_PE_absptr => core::mem::size_of::<*const ()>(), + DW_EH_PE_udata2 => 2, + DW_EH_PE_udata4 => 4, + DW_EH_PE_udata8 => 8, + _ => unreachable!(), + } + } +} + +unsafe fn get_ttype_entry( + offset: usize, + encoding: u8, + ttype_base: usize, + ttype: *const u8, 
+) -> Result<*const u8, ()> { + let i = (offset * size_of_encoded_value(encoding)) as isize; + read_encoded_pointer_with_base( + &mut DwarfReader::new(ttype.offset(-i)), + // the DW_EH_PE_pcrel is a hack. + // It seems that the default encoding is absolute, but we have to take reallocation into + // account. Unsure if we can fix this in the compiler setting or if this would be affected + // by updating the compiler + encoding, + ttype_base, + ) + .map(|v| v as *const u8) +} + +pub unsafe fn find_eh_action( + lsda: *const u8, + context: &EHContext<'_>, + name: *const u8, + len: usize, +) -> Result { if lsda.is_null() { return Ok(EHAction::None); } @@ -176,10 +221,14 @@ pub unsafe fn find_eh_action(lsda: *const u8, context: &EHContext<'_>) -> Result }; let ttype_encoding = reader.read::(); - if ttype_encoding != DW_EH_PE_omit { - // Rust doesn't analyze exception types, so we don't care about the type table - reader.read_uleb128(); - } + // we do care about the type table + let ttype_offset = if ttype_encoding != DW_EH_PE_omit { + reader.read_uleb128() + } else { + 0 + }; + let ttype_base = get_base(ttype_encoding, context).unwrap_or(0); + let ttype_table = reader.ptr.offset(ttype_offset as isize); let call_site_encoding = reader.read::(); let call_site_table_length = reader.read_uleb128(); @@ -198,11 +247,62 @@ pub unsafe fn find_eh_action(lsda: *const u8, context: &EHContext<'_>) -> Result break; } if ip < func_start + cs_start + cs_len { + // https://github.com/gcc-mirror/gcc/blob/master/libstdc%2B%2B-v3/libsupc%2B%2B/eh_personality.cc#L528 + let lpad = lpad_base + cs_lpad; if cs_lpad == 0 { + // no cleanups/handler return Ok(EHAction::None); + } else if cs_action == 0 { + return Ok(EHAction::Cleanup(lpad)); } else { - let lpad = lpad_base + cs_lpad; - return Ok(interpret_cs_action(cs_action, lpad)); + let mut saw_cleanup = false; + let mut action_record = action_table.offset(cs_action as isize - 1); + loop { + let mut reader = DwarfReader::new(action_record); + 
let ar_filter = reader.read_sleb128(); + action_record = reader.ptr; + let ar_disp = reader.read_sleb128(); + if ar_filter == 0 { + saw_cleanup = true; + } else if ar_filter > 0 { + let catch_type = get_ttype_entry( + ar_filter as usize, + ttype_encoding, + ttype_base, + ttype_table, + )?; + if (catch_type as *const CSlice).is_null() { + return Ok(EHAction::Catch(lpad)); + } + // this seems to be target dependent + let clause_ptr = *(catch_type as *const CSlice); + let clause_name_ptr = (clause_ptr).as_ptr(); + let clause_name_len = (clause_ptr).len(); + if clause_name_len == len { + if (clause_name_ptr == core::ptr::null() || + clause_name_ptr == name || + // somehow their name pointers might differ, but the content is the + // same + core::slice::from_raw_parts(clause_name_ptr, clause_name_len) == + core::slice::from_raw_parts(name, len)) + { + return Ok(EHAction::Catch(lpad)); + } + } + } else if ar_filter < 0 { + // FIXME: how to handle this? + break; + } + if ar_disp == 0 { + break; + } + action_record = action_record.offset((ar_disp as usize) as isize); + } + if saw_cleanup { + return Ok(EHAction::Cleanup(lpad)); + } else { + return Ok(EHAction::None); + } } } } @@ -210,7 +310,7 @@ pub unsafe fn find_eh_action(lsda: *const u8, context: &EHContext<'_>) -> Result // So rather than returning EHAction::Terminate, we do this. Ok(EHAction::None) } else { - // SjLj version: + // SjLj version: (not yet modified) // The "IP" is an index into the call-site table, with two exceptions: // -1 means 'no-action', and 0 means 'terminate'. 
match ip as isize { @@ -246,5 +346,20 @@ fn interpret_cs_action(cs_action: u64, lpad: usize) -> EHAction { #[inline] fn round_up(unrounded: usize, align: usize) -> Result { - if align.is_power_of_two() { Ok((unrounded + align - 1) & !(align - 1)) } else { Err(()) } + if align.is_power_of_two() { + Ok((unrounded + align - 1) & !(align - 1)) + } else { + Err(()) + } } + +fn get_base(encoding: u8, context: &EHContext<'_>) -> Result { + match encoding & 0x70 { + DW_EH_PE_absptr | DW_EH_PE_pcrel | DW_EH_PE_aligned => Ok(0), + DW_EH_PE_textrel => Ok((*context.get_text_start)()), + DW_EH_PE_datarel => Ok((*context.get_data_start)()), + DW_EH_PE_funcrel if context.func_start != 0 => Ok(context.func_start), + _ => return Err(()), + } +} + diff --git a/artiq/firmware/libeh/eh_rust.rs b/artiq/firmware/libeh/eh_rust.rs index 166aae843..43e17eeac 100644 --- a/artiq/firmware/libeh/eh_rust.rs +++ b/artiq/firmware/libeh/eh_rust.rs @@ -87,5 +87,5 @@ unsafe fn find_eh_action(context: *mut uw::_Unwind_Context) -> Result usize { self.len as usize } + + pub fn as_ptr(&self) -> *const T { + self.base + } } impl<'a, T> AsRef<[T]> for CSlice<'a, T> { From 4e3e0d129c84d5336e5a54c9a5eb3fd965341912 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Tue, 11 Jan 2022 09:31:26 +0800 Subject: [PATCH 46/59] firmware: fix compilation warning --- artiq/firmware/runtime/rtio_mgt.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/artiq/firmware/runtime/rtio_mgt.rs b/artiq/firmware/runtime/rtio_mgt.rs index 1a1d1660b..2869cf744 100644 --- a/artiq/firmware/runtime/rtio_mgt.rs +++ b/artiq/firmware/runtime/rtio_mgt.rs @@ -329,7 +329,7 @@ pub mod drtio { static mut SEEN_ASYNC_ERRORS: u8 = 0; pub unsafe fn get_async_errors() -> u8 { - let mut errors = SEEN_ASYNC_ERRORS; + let errors = SEEN_ASYNC_ERRORS; SEEN_ASYNC_ERRORS = 0; errors } From 095fb9e3332ded9013d9872c3f855cf725141653 Mon Sep 17 00:00:00 2001 From: Spaqin Date: Tue, 11 Jan 2022 09:55:39 +0800 Subject: [PATCH 47/59] 
add Almazny support (#1780) --- RELEASE_NOTES.rst | 1 + .../coredevice/coredevice_generic.schema.json | 4 + artiq/coredevice/mirny.py | 117 +++++++++++++++++- artiq/frontend/artiq_ddb_template.py | 12 ++ artiq/frontend/artiq_sinara_tester.py | 67 +++++++++- 5 files changed, 198 insertions(+), 3 deletions(-) diff --git a/RELEASE_NOTES.rst b/RELEASE_NOTES.rst index ceb7600d9..a97f43d5e 100644 --- a/RELEASE_NOTES.rst +++ b/RELEASE_NOTES.rst @@ -11,6 +11,7 @@ Highlights: * New hardware support: - Kasli-SoC, a new EEM carrier based on a Zynq SoC, enabling much faster kernel execution. - HVAMP_8CH 8 channel HV amplifier for Fastino / Zotinos + - Almazny mezzanine board for Mirny * Softcore targets now use the RISC-V architecture (VexRiscv) instead of OR1K (mor1kx). * Faster compilation for large arrays/lists. * Phaser: diff --git a/artiq/coredevice/coredevice_generic.schema.json b/artiq/coredevice/coredevice_generic.schema.json index 274d7e2aa..f5d096788 100644 --- a/artiq/coredevice/coredevice_generic.schema.json +++ b/artiq/coredevice/coredevice_generic.schema.json @@ -409,6 +409,10 @@ } ], "default": 0 + }, + "almazny": { + "type": "boolean", + "default": false } }, "required": ["ports"] diff --git a/artiq/coredevice/mirny.py b/artiq/coredevice/mirny.py index ddcdd1931..70daf1f61 100644 --- a/artiq/coredevice/mirny.py +++ b/artiq/coredevice/mirny.py @@ -31,6 +31,16 @@ WE = 1 << 24 # supported CPLD code version PROTO_REV_MATCH = 0x0 +# almazny-specific data +ALMAZNY_REG_BASE = 0x0C +ALMAZNY_OE_SHIFT = 12 + +# higher SPI write divider to match almazny shift register timing +# min SER time before SRCLK rise = 125ns +# -> div=32 gives 125ns for data before clock rise +# works at faster dividers too but could be less reliable +ALMAZNY_SPIT_WR = 32 + class Mirny: """ @@ -132,11 +142,114 @@ class Mirny: self.bus.write(((channel | 8) << 25) | (att << 16)) @kernel - def write_ext(self, addr, length, data): + def write_ext(self, addr, length, data, ext_div=SPIT_WR): """Perform 
SPI write to a prefixed address""" self.bus.set_config_mu(SPI_CONFIG, 8, SPIT_WR, SPI_CS) self.bus.write(addr << 25) - self.bus.set_config_mu(SPI_CONFIG | spi.SPI_END, length, SPIT_WR, SPI_CS) + self.bus.set_config_mu(SPI_CONFIG | spi.SPI_END, length, ext_div, SPI_CS) if length < 32: data <<= 32 - length self.bus.write(data) + + +class Almazny: + """ + Almazny (High frequency mezzanine board for Mirny) + + :param host_mirny - Mirny device Almazny is connected to + """ + + def __init__(self, dmgr, host_mirny): + self.mirny_cpld = dmgr.get(host_mirny) + self.att_mu = [0x3f] * 4 + self.channel_sw = [0] * 4 + self.output_enable = False + + @kernel + def init(self): + self.output_toggle(self.output_enable) + + @kernel + def att_to_mu(self, att): + """ + Convert an attenuator setting in dB to machine units. + + :param att: attenuator setting in dB [0-31.5] + :return: attenuator setting in machine units + """ + mu = round(att * 2.0) + if mu > 63 or mu < 0: + raise ValueError("Invalid Almazny attenuator settings!") + return mu + + @kernel + def mu_to_att(self, att_mu): + """ + Convert a digital attenuator setting to dB. + + :param att_mu: attenuator setting in machine units + :return: attenuator setting in dB + """ + return att_mu / 2 + + @kernel + def set_att(self, channel, att, rf_switch=True): + """ + Sets attenuators on chosen shift register (channel). + :param channel - index of the register [0-3] + :param att_mu - attenuation setting in dBm [0-31.5] + :param rf_switch - rf switch (bool) + """ + self.set_att_mu(channel, self.att_to_mu(att), rf_switch) + + @kernel + def set_att_mu(self, channel, att_mu, rf_switch=True): + """ + Sets attenuators on chosen shift register (channel). 
+ :param channel - index of the register [0-3] + :param att_mu - attenuation setting in machine units [0-63] + :param rf_switch - rf switch (bool) + """ + self.channel_sw[channel] = 1 if rf_switch else 0 + self.att_mu[channel] = att_mu + self._update_register(channel) + + @kernel + def output_toggle(self, oe): + """ + Toggles output on all shift registers on or off. + :param oe - toggle output enable (bool) + """ + self.output_enable = oe + cfg_reg = self.mirny_cpld.read_reg(1) + en = 1 if self.output_enable else 0 + delay(100 * us) + new_reg = (en << ALMAZNY_OE_SHIFT) | (cfg_reg & 0x3FF) + self.mirny_cpld.write_reg(1, new_reg) + delay(100 * us) + + @kernel + def _flip_mu_bits(self, mu): + # in this form MSB is actually 0.5dB attenuator + # unnatural for users, so we flip the six bits + return (((mu & 0x01) << 5) + | ((mu & 0x02) << 3) + | ((mu & 0x04) << 1) + | ((mu & 0x08) >> 1) + | ((mu & 0x10) >> 3) + | ((mu & 0x20) >> 5)) + + @kernel + def _update_register(self, ch): + self.mirny_cpld.write_ext( + ALMAZNY_REG_BASE + ch, + 8, + self._flip_mu_bits(self.att_mu[ch]) | (self.channel_sw[ch] << 6), + ALMAZNY_SPIT_WR + ) + delay(100 * us) + + @kernel + def _update_all_registers(self): + for i in range(4): + self._update_register(i) \ No newline at end of file diff --git a/artiq/frontend/artiq_ddb_template.py b/artiq/frontend/artiq_ddb_template.py index 0a14a06be..b24f63315 100755 --- a/artiq/frontend/artiq_ddb_template.py +++ b/artiq/frontend/artiq_ddb_template.py @@ -294,6 +294,18 @@ class PeripheralManager: name=mirny_name, refclk=peripheral["refclk"], clk_sel=clk_sel) + almazny = peripheral.get("almazny", False) + if almazny: + self.gen(""" + device_db["{name}_almazny"] = {{ + "type": "local", + "module": "artiq.coredevice.mirny", + "class": "Almazny", + "arguments": {{ + "host_mirny": "{name}_cpld", + }}, + }}""", + name=mirny_name) return next(channel) diff --git a/artiq/frontend/artiq_sinara_tester.py b/artiq/frontend/artiq_sinara_tester.py index 
702f22efb..31f947631 100755 --- a/artiq/frontend/artiq_sinara_tester.py +++ b/artiq/frontend/artiq_sinara_tester.py @@ -59,6 +59,7 @@ class SinaraTester(EnvExperiment): self.mirnies = dict() self.suservos = dict() self.suschannels = dict() + self.almaznys = dict() ddb = self.get_device_db() for name, desc in ddb.items(): @@ -96,6 +97,8 @@ class SinaraTester(EnvExperiment): self.suservos[name] = self.get_device(name) elif (module, cls) == ("artiq.coredevice.suservo", "Channel"): self.suschannels[name] = self.get_device(name) + elif (module, cls) == ("artiq.coredevice.mirny", "Almazny"): + self.almaznys[name] = self.get_device(name) # Remove Urukul, Sampler, Zotino and Mirny control signals # from TTL outs (tested separately) and remove Urukuls covered by @@ -351,6 +354,68 @@ class SinaraTester(EnvExperiment): for channel in channels: channel.pulse(100*ms) delay(100*ms) + @kernel + def init_almazny(self, almazny): + self.core.break_realtime() + almazny.init() + almazny.output_toggle(True) + + @kernel + def almazny_set_attenuators_mu(self, almazny, ch, atts): + self.core.break_realtime() + almazny.set_att_mu(ch, atts) + + @kernel + def almazny_set_attenuators(self, almazny, ch, atts): + self.core.break_realtime() + almazny.set_att(ch, atts) + + @kernel + def almazny_toggle_output(self, almazny, rf_on): + self.core.break_realtime() + almazny.output_toggle(rf_on) + + def test_almaznys(self): + print("*** Testing Almaznys.") + for name, almazny in sorted(self.almaznys.items(), key=lambda x: x[0]): + print(name + "...") + print("Initializing Mirny CPLDs...") + for name, cpld in sorted(self.mirny_cplds.items(), key=lambda x: x[0]): + print(name + "...") + self.init_mirny(cpld) + print("...done") + + print("Testing attenuators. 
Frequencies:") + for card_n, channels in enumerate(chunker(self.mirnies, 4)): + for channel_n, (channel_name, channel_dev) in enumerate(channels): + frequency = 2000 + card_n * 250 + channel_n * 50 + print("{}\t{}MHz".format(channel_name, frequency*2)) + self.setup_mirny(channel_dev, frequency) + print("{} info: {}".format(channel_name, channel_dev.info())) + self.init_almazny(almazny) + print("RF ON, all attenuators ON. Press ENTER when done.") + for i in range(4): + self.almazny_set_attenuators_mu(almazny, i, 63) + input() + print("RF ON, half power attenuators ON. Press ENTER when done.") + for i in range(4): + self.almazny_set_attenuators(almazny, i, 15.5) + input() + print("RF ON, all attenuators OFF. Press ENTER when done.") + for i in range(4): + self.almazny_set_attenuators(almazny, i, 0) + input() + print("SR outputs are OFF. Press ENTER when done.") + self.almazny_toggle_output(almazny, False) + input() + print("RF ON, all attenuators are ON. Press ENTER when done.") + for i in range(4): + self.almazny_set_attenuators(almazny, i, 31.5) + self.almazny_toggle_output(almazny, True) + input() + print("RF OFF. 
Press ENTER when done.") + self.almazny_toggle_output(almazny, False) + input() def test_mirnies(self): print("*** Testing Mirny PLLs.") @@ -365,7 +430,7 @@ class SinaraTester(EnvExperiment): print("Frequencies:") for card_n, channels in enumerate(chunker(self.mirnies, 4)): for channel_n, (channel_name, channel_dev) in enumerate(channels): - frequency = 1000*(card_n + 1) + channel_n * 100 + 8 # Extra 8 Hz for easier observation + frequency = 1000*(card_n + 1) + channel_n * 100 print("{}\t{}MHz".format(channel_name, frequency)) self.setup_mirny(channel_dev, frequency) print("{} info: {}".format(channel_name, channel_dev.info())) From d7dd75e833fc99ad75e10220a5ec8f03d64ee5bd Mon Sep 17 00:00:00 2001 From: Steve Fan Date: Tue, 11 Jan 2022 10:12:40 +0800 Subject: [PATCH 48/59] comm_kernel: fix off-by-one error for numeric value range check --- artiq/coredevice/comm_kernel.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/artiq/coredevice/comm_kernel.py b/artiq/coredevice/comm_kernel.py index 1b0111c49..df080d2d1 100644 --- a/artiq/coredevice/comm_kernel.py +++ b/artiq/coredevice/comm_kernel.py @@ -437,12 +437,12 @@ class CommKernel: self._write_bool(value) elif tag == "i": check(isinstance(value, (int, numpy.int32)) and - (-2**31 < value < 2**31-1), + (-2**31 <= value < 2**31), lambda: "32-bit int") self._write_int32(value) elif tag == "I": check(isinstance(value, (int, numpy.int32, numpy.int64)) and - (-2**63 < value < 2**63-1), + (-2**63 <= value < 2**63), lambda: "64-bit int") self._write_int64(value) elif tag == "f": @@ -451,8 +451,8 @@ class CommKernel: self._write_float64(value) elif tag == "F": check(isinstance(value, Fraction) and - (-2**63 < value.numerator < 2**63-1) and - (-2**63 < value.denominator < 2**63-1), + (-2**63 <= value.numerator < 2**63) and + (-2**63 <= value.denominator < 2**63), lambda: "64-bit Fraction") self._write_int64(value.numerator) self._write_int64(value.denominator) From 
9eee0e5a7b2bfabf70307ba0e1999cd6263a8e97 Mon Sep 17 00:00:00 2001 From: occheung Date: Tue, 11 Jan 2022 14:20:44 +0800 Subject: [PATCH 49/59] gateware/suservo: fix profile no. in test Follow-up/Test update for 9d49302. --- artiq/gateware/test/suservo/test_dds.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/artiq/gateware/test/suservo/test_dds.py b/artiq/gateware/test/suservo/test_dds.py index e8c221f51..bef67d430 100644 --- a/artiq/gateware/test/suservo/test_dds.py +++ b/artiq/gateware/test/suservo/test_dds.py @@ -79,7 +79,7 @@ def main(): data = [] run_simulation(tb, [tb.log(data), run(tb)], vcd_name="dds.vcd") - assert data[-1][1] == [[0xe, 0x40 | i, 0x30 | i, 0x20 | i] for i in + assert data[-1][1] == [[0x15, 0x40 | i, 0x30 | i, 0x20 | i] for i in range(4)] From 4eee49f8891bcf3c5efbe3e5b046b8cea2207908 Mon Sep 17 00:00:00 2001 From: Peter Drmota <49479443+pmldrmota@users.noreply.github.com> Date: Tue, 11 Jan 2022 17:15:31 +0800 Subject: [PATCH 50/59] gateware.test.suservo: Fix tests for python >=3.7 Closes #1748 --- artiq/gateware/test/suservo/test_adc.py | 10 +++++----- artiq/gateware/test/suservo/test_dds.py | 6 ++++-- artiq/gateware/test/suservo/test_servo.py | 2 +- 3 files changed, 10 insertions(+), 8 deletions(-) diff --git a/artiq/gateware/test/suservo/test_adc.py b/artiq/gateware/test/suservo/test_adc.py index 3b31b0708..37b23305f 100644 --- a/artiq/gateware/test/suservo/test_adc.py +++ b/artiq/gateware/test/suservo/test_adc.py @@ -43,7 +43,7 @@ class TB(Module): ) ] cnv_old = Signal(reset_less=True) - self.sync.async += [ + self.sync.async_ += [ cnv_old.eq(self.cnv), If(Cat(cnv_old, self.cnv) == 0b10, sr.eq(Cat(reversed(self.data[2*i:2*i + 2]))), @@ -62,7 +62,7 @@ class TB(Module): def _dly(self, sig, n=0): n += self.params.t_rtt*4//2 # t_{sys,adc,ret}/t_async half rtt dly = Signal(n, reset_less=True) - self.sync.async += dly.eq(Cat(sig, dly)) + self.sync.async_ += dly.eq(Cat(sig, dly)) return dly[-1] @@ -85,8 +85,8 @@ def main(): 
assert not (yield dut.done) while not (yield dut.done): yield - x = (yield from [(yield d) for d in dut.data]) - for i, ch in enumerate(x): + for i, d in enumerate(dut.data): + ch = yield d assert ch == i, (hex(ch), hex(i)) run_simulation(tb, [run(tb)], @@ -95,7 +95,7 @@ def main(): "sys": (8, 0), "adc": (8, 0), "ret": (8, 0), - "async": (2, 0), + "async_": (2, 0), }, ) diff --git a/artiq/gateware/test/suservo/test_dds.py b/artiq/gateware/test/suservo/test_dds.py index bef67d430..a666f14c5 100644 --- a/artiq/gateware/test/suservo/test_dds.py +++ b/artiq/gateware/test/suservo/test_dds.py @@ -44,8 +44,10 @@ class TB(Module): yield dat = [] for dds in self.ddss: - v = yield from [(yield getattr(dds, k)) - for k in "cmd ftw pow asf".split()] + v = [] + for k in "cmd ftw pow asf".split(): + f = yield getattr(dds, k) + v.append(f) dat.append(v) data.append((i, dat)) else: diff --git a/artiq/gateware/test/suservo/test_servo.py b/artiq/gateware/test/suservo/test_servo.py index c28557d89..cc1a73a2b 100644 --- a/artiq/gateware/test/suservo/test_servo.py +++ b/artiq/gateware/test/suservo/test_servo.py @@ -91,7 +91,7 @@ def main(): "sys": (8, 0), "adc": (8, 0), "ret": (8, 0), - "async": (2, 0), + "async_": (2, 0), }) From de5892a00a946f62e4b61b1d8460bf97bbb1160c Mon Sep 17 00:00:00 2001 From: Steve Fan <19037626d@connect.polyu.hk> Date: Tue, 11 Jan 2022 17:16:45 +0800 Subject: [PATCH 51/59] comm_kernel: check if elements are within bounds for RPC list (#1824) --- artiq/coredevice/comm_kernel.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/artiq/coredevice/comm_kernel.py b/artiq/coredevice/comm_kernel.py index df080d2d1..f566c5ff8 100644 --- a/artiq/coredevice/comm_kernel.py +++ b/artiq/coredevice/comm_kernel.py @@ -476,11 +476,19 @@ class CommKernel: if tag_element == "b": self._write(bytes(value)) elif tag_element == "i": - self._write(struct.pack(self.endian + "%sl" % - len(value), *value)) + try: + self._write(struct.pack(self.endian + 
"%sl" % len(value), *value)) + except struct.error: + raise RPCReturnValueError( + "type mismatch: cannot serialize {value} as {type}".format( + value=repr(value), type="32-bit integer list")) elif tag_element == "I": - self._write(struct.pack(self.endian + "%sq" % - len(value), *value)) + try: + self._write(struct.pack(self.endian + "%sq" % len(value), *value)) + except struct.error: + raise RPCReturnValueError( + "type mismatch: cannot serialize {value} as {type}".format( + value=repr(value), type="64-bit integer list")) elif tag_element == "f": self._write(struct.pack(self.endian + "%sd" % len(value), *value)) From b6c59a0cb31298d15697ec48429619368aa74617 Mon Sep 17 00:00:00 2001 From: occheung Date: Tue, 11 Jan 2022 17:32:12 +0800 Subject: [PATCH 52/59] update misoc dependencies Suppress warning when compiling libunwind. https://github.com/m-labs/misoc/commit/7242dc5a41732135425acc4871487461dfae6c66 --- flake.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/flake.lock b/flake.lock index 35c1fb29a..6647c1a02 100644 --- a/flake.lock +++ b/flake.lock @@ -61,11 +61,11 @@ "src-misoc": { "flake": false, "locked": { - "lastModified": 1638683371, - "narHash": "sha256-sm2SxHmEGfE56+V+joDHMjpOaxg8+t3EJEk1d11C1E0=", + "lastModified": 1641889368, + "narHash": "sha256-0Ai25lry9ju1HxFmfMRNKG8mamBqvw+kvDfpuK8Dtjo=", "ref": "master", - "rev": "71b74f87b41c56a6c6d767cdfde0356c15a379a7", - "revCount": 2418, + "rev": "7242dc5a41732135425acc4871487461dfae6c66", + "revCount": 2419, "submodules": true, "type": "git", "url": "https://github.com/m-labs/misoc.git" From 3f812c4c2c1c322b28a68b3e985006631db872cd Mon Sep 17 00:00:00 2001 From: Steve Fan <19037626d@connect.polyu.hk> Date: Wed, 12 Jan 2022 15:23:37 +0800 Subject: [PATCH 53/59] comm_kernel: fix RPC exception handling (#1801) --- artiq/coredevice/comm_kernel.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/artiq/coredevice/comm_kernel.py 
b/artiq/coredevice/comm_kernel.py index f566c5ff8..37ffd2aeb 100644 --- a/artiq/coredevice/comm_kernel.py +++ b/artiq/coredevice/comm_kernel.py @@ -563,14 +563,6 @@ class CommKernel: try: result = service(*args, **kwargs) - logger.debug("rpc service: %d %r %r = %r", - service_id, args, kwargs, result) - - self._write_header(Request.RPCReply) - self._write_bytes(return_tags) - self._send_rpc_value(bytearray(return_tags), - result, result, service) - self._flush() except RPCReturnValueError as exn: raise except Exception as exn: @@ -617,6 +609,14 @@ class CommKernel: self._write_int32(-1) # column not known self._write_string(function) self._flush() + else: + logger.debug("rpc service: %d %r %r = %r", + service_id, args, kwargs, result) + self._write_header(Request.RPCReply) + self._write_bytes(return_tags) + self._send_rpc_value(bytearray(return_tags), + result, result, service) + self._flush() def _serve_exception(self, embedding_map, symbolizer, demangler): name = self._read_string() From 735cd1eb3e5c069481ff2ca10de6e91e96692980 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Fri, 14 Jan 2022 16:50:08 +0800 Subject: [PATCH 54/59] manual: update development instructions --- doc/manual/developing.rst | 23 +++++++++-------------- 1 file changed, 9 insertions(+), 14 deletions(-) diff --git a/doc/manual/developing.rst b/doc/manual/developing.rst index fa26c14fd..5b4022068 100644 --- a/doc/manual/developing.rst +++ b/doc/manual/developing.rst @@ -5,24 +5,19 @@ Developing ARTIQ This section is only for software or FPGA developers who want to modify ARTIQ. The steps described here are not required if you simply want to run experiments with ARTIQ. If you purchased a system from M-Labs or QUARTIQ, we normally provide board binaries for you. The easiest way to obtain an ARTIQ development environment is via the Nix package manager on Linux. 
The Nix system is used on the `M-Labs Hydra server <https://nixbld.m-labs.hk/>`_ to build ARTIQ and its dependencies continuously; it ensures that all build instructions are up-to-date and allows binary packages to be used on developers' machines, in particular for large tools such as the Rust compiler. -ARTIQ itself does not depend on Nix, and it is also possible to compile everything from source (look into the ``.nix`` files from the ``nix-scripts`` repository and run the commands manually) - but Nix makes the process a lot easier. +ARTIQ itself does not depend on Nix, and it is also possible to compile everything from source (look into the ``flake.nix`` file and/or nixpkgs, and run the commands manually) - but Nix makes the process a lot easier. -* Download Vivado from Xilinx and install it (by running the official installer in a FHS chroot environment if using NixOS). If you do not want to write to ``/opt``, you can install it in a folder of your home directory. The "appropriate" Vivado version to use for building the bitstream can vary. Some versions contain bugs that lead to hidden or visible failures, others work fine. Refer to `Hydra <https://nixbld.m-labs.hk/>`_ and/or the ``vivado.nix`` file from the ``nix-scripts`` repository in order to determine which version is used at M-Labs. If the Vivado GUI installer crashes, you may be able to work around the problem by running it in unattended mode with a command such as ``./xsetup -a XilinxEULA,3rdPartyEULA,WebTalkTerms -b Install -e 'Vitis Unified Software Platform' -l /opt/Xilinx/``. +* Download Vivado from Xilinx and install it (by running the official installer in a FHS chroot environment if using NixOS; the ARTIQ flake provides such an environment). If you do not want to write to ``/opt``, you can install it in a folder of your home directory. The "appropriate" Vivado version to use for building the bitstream can vary. Some versions contain bugs that lead to hidden or visible failures, others work fine. 
Refer to `Hydra <https://nixbld.m-labs.hk/>`_ and/or the ``flake.nix`` file from the ARTIQ repository in order to determine which version is used at M-Labs. If the Vivado GUI installer crashes, you may be able to work around the problem by running it in unattended mode with a command such as ``./xsetup -a XilinxEULA,3rdPartyEULA,WebTalkTerms -b Install -e 'Vitis Unified Software Platform' -l /opt/Xilinx/``. * During the Vivado installation, uncheck ``Install cable drivers`` (they are not required as we use better and open source alternatives). -* Install the `Nix package manager <http://nixos.org/nix/>`_ and Git (e.g. ``$ nix-shell -p git``). -* Set up the M-Labs binary substituter (:ref:`same procedure as the user section `) to allow binaries to be downloaded. Otherwise, tools such as LLVM and the Rust compiler will be compiled on your machine, which uses a lot of CPU time, memory, and disk space. -* Clone the repositories https://github.com/m-labs/artiq and https://git.m-labs.hk/m-labs/nix-scripts. -* If you did not install Vivado in its default location ``/opt``, edit ``vivado.nix`` accordingly. -* Run ``$ nix-shell -I artiqSrc=path_to_artiq_sources shell-dev.nix`` to obtain an environment containing all the required development tools (e.g. Migen, MiSoC, Clang, Rust, OpenOCD...) in addition to the ARTIQ user environment. ``artiqSrc`` should point to the root of the cloned ``artiq`` repository, and ``shell-dev.nix`` can be found in the ``artiq-fast`` folder of the ``nix-scripts`` repository. +* Install the `Nix package manager <https://nixos.org/download.html>`_, version 2.4 or later. Prefer a single-user installation for simplicity. +* If you did not install Vivado in its default location ``/opt``, clone the ARTIQ Git repository and edit ``flake.nix`` accordingly. +* Enable flakes in Nix by e.g. adding ``experimental-features = nix-command flakes`` to ``nix.conf`` (for example ``~/.config/nix/nix.conf``). 
+* Enter the development shell by running ``nix develop github:m-labs/artiq``, or alternatively by cloning the ARTIQ Git repository and running ``nix develop`` at the root (where ``flake.nix`` is). * You can then build the firmware and gateware with a command such as ``$ python -m artiq.gateware.targets.kasli``. If you are using a JSON system description file, use ``$ python -m artiq.gateware.targets.kasli_generic file.json``. -* Flash the binaries into the FPGA board with a command such as ``$ artiq_flash --srcbuild -d artiq_kasli -V ``. You need to configure OpenOCD as explained :ref:`in the user section `. OpenOCD is already part of the shell started by ``shell-dev.nix``. -* Check that the board boots and examine the UART messages by running a serial terminal program, e.g. ``$ flterm /dev/ttyUSB1`` (``flterm`` is part of MiSoC and installed by ``shell-dev.nix``). Leave the terminal running while you are flashing the board, so that you see the startup messages when the board boots immediately after flashing. You can also restart the board (without reflashing it) with ``$ artiq_flash start``. +* Flash the binaries into the FPGA board with a command such as ``$ artiq_flash --srcbuild -d artiq_kasli -V ``. You need to configure OpenOCD as explained :ref:`in the user section `. OpenOCD is already part of the flake's development environment. +* Check that the board boots and examine the UART messages by running a serial terminal program, e.g. ``$ flterm /dev/ttyUSB1`` (``flterm`` is part of MiSoC and installed in the flake's development environment). Leave the terminal running while you are flashing the board, so that you see the startup messages when the board boots immediately after flashing. You can also restart the board (without reflashing it) with ``$ artiq_flash start``. * The communication parameters are 115200 8-N-1. Ensure that your user has access to the serial device (e.g. by adding the user account to the ``dialout`` group). -.. 
note:: - If you do not plan to modify ``nix-scripts``, with the ARTIQ channel configured you can simply enter the development Nix shell with ``nix-shell "<artiq-full/fast/shell-dev.nix>"``. No repositories need to be cloned. This is especially useful if you simply want to build firmware using an unmodified version of ARTIQ. - - .. warning:: - Nix will make a read-only copy of the ARTIQ source to use in the shell environment. Therefore, any modifications that you make to the source after the shell is started will not be taken into account. A solution applicable to ARTIQ (and several other Python packages such as Migen and MiSoC) is to prepend the ARTIQ source directory to the ``PYTHONPATH`` environment variable after entering the shell. If you want this to be done by default, edit ``profile`` in ``shell-dev.nix``. + Nix will make a read-only copy of the ARTIQ source to use in the shell environment. Therefore, any modifications that you make to the source after the shell is started will not be taken into account. A solution applicable to ARTIQ (and several other Python packages such as Migen and MiSoC) is to prepend the ARTIQ source directory to the ``PYTHONPATH`` environment variable after entering the shell. If you want this to be done by default, edit the ``devShell`` section of ``flake.nix``. 
From 53878fe1d410ecc121f5ef5a4b634b43aa43b634 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Wed, 19 Jan 2022 19:58:55 +0800 Subject: [PATCH 55/59] flake: get version number from nix --- flake.nix | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/flake.nix b/flake.nix index f5a29ff4a..0bd20b61d 100644 --- a/flake.nix +++ b/flake.nix @@ -12,6 +12,12 @@ outputs = { self, nixpkgs, mozilla-overlay, src-sipyco, src-pythonparser, src-migen, src-misoc }: let pkgs = import nixpkgs { system = "x86_64-linux"; overlays = [ (import mozilla-overlay) ]; }; + + artiqVersionMajor = 7; + artiqVersionMinor = self.sourceInfo.revCount or 0; + artiqVersionId = self.sourceInfo.shortRev or "unknown"; + artiqVersion = (builtins.toString artiqVersionMajor) + "." + (builtins.toString artiqVersionMinor) + "-" + artiqVersionId; + rustManifest = pkgs.fetchurl { url = "https://static.rust-lang.org/dist/2021-01-29/channel-rust-nightly.toml"; sha256 = "sha256-EZKgw89AH4vxaJpUHmIMzMW/80wAFQlfcxRoBD9nz0c="; @@ -134,7 +140,7 @@ artiq = pkgs.python3Packages.buildPythonPackage rec { pname = "artiq"; - version = "7.0-dev"; + version = artiqVersion; src = self; preBuild = "export VERSIONEER_OVERRIDE=${version}"; From f542f045da168d139cb7ad26680b751fa99bb066 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Wed, 19 Jan 2022 20:04:20 +0800 Subject: [PATCH 56/59] manual: use git+https URL for ARTIQ flake github: flake URL lacks revCount --- doc/manual/developing.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/developing.rst b/doc/manual/developing.rst index 5b4022068..91ac11f07 100644 --- a/doc/manual/developing.rst +++ b/doc/manual/developing.rst @@ -12,7 +12,7 @@ ARTIQ itself does not depend on Nix, and it is also possible to compile everythi * Install the `Nix package manager `_, version 2.4 or later. Prefer a single-user installation for simplicity. 
* If you did not install Vivado in its default location ``/opt``, clone the ARTIQ Git repository and edit ``flake.nix`` accordingly. * Enable flakes in Nix by e.g. adding ``experimental-features = nix-command flakes`` to ``nix.conf`` (for example ``~/.config/nix/nix.conf``). -* Enter the development shell by running ``nix develop github:m-labs/artiq``, or alternatively by cloning the ARTIQ Git repository and running ``nix develop`` at the root (where ``flake.nix`` is). +* Enter the development shell by running ``nix develop git+https://github.com/m-labs/artiq.git``, or alternatively by cloning the ARTIQ Git repository and running ``nix develop`` at the root (where ``flake.nix`` is). * You can then build the firmware and gateware with a command such as ``$ python -m artiq.gateware.targets.kasli``. If you are using a JSON system description file, use ``$ python -m artiq.gateware.targets.kasli_generic file.json``. * Flash the binaries into the FPGA board with a command such as ``$ artiq_flash --srcbuild -d artiq_kasli -V ``. You need to configure OpenOCD as explained :ref:`in the user section `. OpenOCD is already part of the flake's development environment. * Check that the board boots and examine the UART messages by running a serial terminal program, e.g. ``$ flterm /dev/ttyUSB1`` (``flterm`` is part of MiSoC and installed in the flake's development environment). Leave the terminal running while you are flashing the board, so that you see the startup messages when the board boots immediately after flashing. You can also restart the board (without reflashing it) with ``$ artiq_flash start``. 
From 5597be335681e0027c76395bb9b0325fa5115c13 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Wed, 19 Jan 2022 20:17:11 +0800 Subject: [PATCH 57/59] flake: add beta to version string --- flake.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/flake.nix b/flake.nix index 0bd20b61d..3c7094e28 100644 --- a/flake.nix +++ b/flake.nix @@ -16,7 +16,7 @@ artiqVersionMajor = 7; artiqVersionMinor = self.sourceInfo.revCount or 0; artiqVersionId = self.sourceInfo.shortRev or "unknown"; - artiqVersion = (builtins.toString artiqVersionMajor) + "." + (builtins.toString artiqVersionMinor) + "-" + artiqVersionId; + artiqVersion = (builtins.toString artiqVersionMajor) + "." + (builtins.toString artiqVersionMinor) + "-" + artiqVersionId + "-beta"; rustManifest = pkgs.fetchurl { url = "https://static.rust-lang.org/dist/2021-01-29/channel-rust-nightly.toml"; From 4e420fc297be78ebc31f8eb45e1be4c066e1d96e Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Wed, 19 Jan 2022 20:18:54 +0800 Subject: [PATCH 58/59] flake: update inputs --- flake.lock | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/flake.lock b/flake.lock index 6647c1a02..fa8cd3d65 100644 --- a/flake.lock +++ b/flake.lock @@ -3,11 +3,11 @@ "mozilla-overlay": { "flake": false, "locked": { - "lastModified": 1637337116, - "narHash": "sha256-LKqAcdL+woWeYajs02bDQ7q8rsqgXuzhC354NoRaV80=", + "lastModified": 1638887313, + "narHash": "sha256-FMYV6rVtvSIfthgC1sK1xugh3y7muoQcvduMdriz4ag=", "owner": "mozilla", "repo": "nixpkgs-mozilla", - "rev": "cbc7435f5b0b3d17b16fb1d20cf7b616eec5e093", + "rev": "7c1e8b1dd6ed0043fb4ee0b12b815256b0b9de6f", "type": "github" }, "original": { @@ -18,11 +18,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1638279546, - "narHash": "sha256-1KCwN7twjp1dBdp0jPgVdYFztDkCR8+roo0B34J9oBY=", + "lastModified": 1642522226, + "narHash": "sha256-m/j9U8KYuwwxjwgRCjmEj8ejftvdMLJ+NGXh/L2I4FU=", "owner": "NixOS", "repo": "nixpkgs", - 
"rev": "96b4157790fc96e70d6e6c115e3f34bba7be490f", + "rev": "610d4ea2750e064bf34b33fa38cb671edd893d3d", "type": "github" }, "original": { @@ -45,11 +45,11 @@ "src-migen": { "flake": false, "locked": { - "lastModified": 1636715924, - "narHash": "sha256-V3ThFSo2d7OC4SHE0lCkKGQKeFXmvxtwZRWe5NMU3nM=", + "lastModified": 1639659493, + "narHash": "sha256-qpVj/yJf4hDDc99XXpVPH4EbLC8aCmEtACn5qNc3DGI=", "owner": "m-labs", "repo": "migen", - "rev": "9a0be7a4210ff96043412539eb5388659b81831d", + "rev": "ac703010eaa06ac9b6e32f97c6fa98b15de22b31", "type": "github" }, "original": { @@ -95,11 +95,11 @@ "src-sipyco": { "flake": false, "locked": { - "lastModified": 1632832039, - "narHash": "sha256-GYXXCCOxNZyy6j7qScB3/QWUUCEVX+4tM4bXXVGXty0=", + "lastModified": 1641866796, + "narHash": "sha256-TSH0IgNbi9IcMcBDb2nWRphKlxstbWeATjrGbi6K2m0=", "owner": "m-labs", "repo": "sipyco", - "rev": "b83d8e5d82b25dba9393f0c12bdc5253f8138545", + "rev": "b04234c49379cd446d4cb3346d4741868d86841a", "type": "github" }, "original": { From f58aa3bdf6e2bb53544688aa042c791e5d319ccc Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Wed, 19 Jan 2022 20:44:50 +0800 Subject: [PATCH 59/59] flake: update qasync --- flake.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/flake.nix b/flake.nix index 3c7094e28..8918092b0 100644 --- a/flake.nix +++ b/flake.nix @@ -67,12 +67,12 @@ qasync = pkgs.python3Packages.buildPythonPackage rec { pname = "qasync"; - version = "0.10.0"; + version = "0.19.0"; src = pkgs.fetchFromGitHub { owner = "CabbageDevelopment"; repo = "qasync"; rev = "v${version}"; - sha256 = "1zga8s6dr7gk6awmxkh4pf25gbg8n6dv1j4b0by7y0fhi949qakq"; + sha256 = "sha256-xGAUAyOq+ELwzMGbLLmXijxLG8pv4a6tPvfAVOt1YwU="; }; propagatedBuildInputs = [ pkgs.python3Packages.pyqt5 ]; checkInputs = [ pkgs.python3Packages.pytest ];