
Merge branch 'master' into nac3

This commit is contained in:
Sebastien Bourdeauducq 2022-03-28 13:41:17 +08:00
commit 69cda517f6
26 changed files with 442 additions and 137 deletions

View File

@ -13,31 +13,35 @@ Highlights:
- HVAMP_8CH 8 channel HV amplifier for Fastino / Zotinos
- Almazny mezzanine board for Mirny
* Softcore targets now use the RISC-V architecture (VexRiscv) instead of OR1K (mor1kx).
* Gateware FPU is supported on KC705 and Kasli 2.0.
* Faster compilation for large arrays/lists.
* Phaser:
- Improved documentation
- Expose the DAC coarse mixer and ``sif_sync``
- Expose upconverter calibration and enabling/disabling of upconverter LO & RF outputs.
- Add helpers to align Phaser updates to the RTIO timeline (``get_next_frame_mu()``)
* Core device moninj is now proxied via the ``aqctl_moninj_proxy`` controller.
* The configuration entry ``rtio_clock`` supports multiple clocking settings, deprecating the usage
of compile-time options.
* Packaging via Nix Flakes.
* Firmware and gateware can now be built on-demand on the M-Labs server using ``afws_client``
(subscribers only).
* ``get()``, ``get_mu()``, ``get_att()``, and ``get_att_mu()`` functions added for AD9910 and AD9912.
* On Kasli, the number of FIFO lanes in the scalable events dispatcher (SED) can now be configured in
the JSON hardware description file.
* ``artiq_ddb_template`` generates edge-counter keys that start with the key of the corresponding
TTL device (e.g. ``ttl_0_counter`` for the edge counter on TTL device ``ttl_0``).
* ``artiq_master`` now has an ``--experiment-subdir`` option to scan only a subdirectory of the
repository when building the list of experiments.
* Added support for 100MHz RTIO clock in DRTIO.
* Previously detected RTIO async errors are reported to the host after each kernel terminates and a
warning is logged. The warning is additional to the one already printed in the core device log upon
detection of the error.
* Extended Kasli gateware JSON description with configuration for SPI over DIO.
See: https://github.com/m-labs/artiq/pull/1800
* Removed worker DB warning for writing a dataset that is also in the archive.
* ``PCA9548`` I2C switch class renamed to ``I2CSwitch``, to accommodate support for PCA9547, and
possibly other switches in future. Readback has been removed, and now only one channel per
switch is supported.
Breaking changes:
@ -51,7 +55,6 @@ Breaking changes:
* Phaser: fixed coarse mixer frequency configuration
* Mirny: Added extra delays in ``ADF5356.sync()``. This avoids the need for an extra delay before
calling ``ADF5356.init()``.
* DRTIO: Changed message alignment from 32 bits to 64 bits.
* The deprecated ``set_dataset(..., save=...)`` is no longer supported.
ARTIQ-6

View File

@ -107,7 +107,7 @@ class Hdf5FileSystemModel(QtWidgets.QFileSystemModel):
v = ("artiq_version: {}\nrepo_rev: {}\nfile: {}\n"
"class_name: {}\nrid: {}\nstart_time: {}").format(
h5["artiq_version"][()], expid["repo_rev"],
expid["file"], expid["class_name"],
expid.get("file", "<none>"), expid["class_name"],
h5["rid"][()], start_time)
return v
except:
@ -179,7 +179,7 @@ class FilesDock(QtWidgets.QDockWidget):
v = {
"artiq_version": f["artiq_version"][()],
"repo_rev": expid["repo_rev"],
"file": expid["file"],
"file": expid.get("file", "<none>"),
"class_name": expid["class_name"],
"rid": f["rid"][()],
"start_time": start_time,

View File

@ -277,6 +277,17 @@ class CoreException:
traceback_str +\
'\n\nEnd of Core Device Traceback\n'
def incompatible_versions(v1, v2):
if v1.endswith(".beta") or v2.endswith(".beta"):
# Beta branches may introduce breaking changes. Check version strictly.
return v1 != v2
else:
# On stable branches, runtime/software protocol backward compatibility is kept.
# Runtime and software with the same major version number are compatible.
return v1.split(".", maxsplit=1)[0] != v2.split(".", maxsplit=1)[0]
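A quick illustration of the rule with made-up version strings (a sketch, not part of the diff):

# Stable branches: only the major number has to match.
assert not incompatible_versions("7.8100.6e36db5", "7.8150.a1b2c3d")
assert incompatible_versions("7.8100.6e36db5", "8.0.0")
# Beta branches: the full version string has to match exactly.
assert incompatible_versions("7.8100.6e36db5.beta", "7.8101.9f0e1d2.beta")
assert not incompatible_versions("7.8100.6e36db5.beta", "7.8100.6e36db5.beta")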
class CommKernel:
warned_of_mismatch = False
@ -453,7 +464,7 @@ class CommKernel:
runtime_id = self._read(4)
if runtime_id == b"AROR":
gateware_version = self._read_string().split(";")[0]
if gateware_version != software_version and not self.warned_of_mismatch:
if not self.warned_of_mismatch and incompatible_versions(gateware_version, software_version):
logger.warning("Mismatch between gateware (%s) "
"and software (%s) versions",
gateware_version, software_version)

View File

@ -169,7 +169,7 @@ class CommMgmt:
self._write_bytes(value)
ty = self._read_header()
if ty == Reply.Error:
raise IOError("Flash storage is full")
raise IOError("Device failed to write config. More information may be available in the log.")
elif ty != Reply.Success:
raise IOError("Incorrect reply from device: {} (expected {})".
format(ty, Reply.Success))

View File

@ -33,14 +33,6 @@ class CommMonInj:
try:
self._writer.write(b"ARTIQ moninj\n")
# get device endian
endian = await self._reader.read(1)
if endian == b"e":
self.endian = "<"
elif endian == b"E":
self.endian = ">"
else:
raise IOError("Incorrect reply from device: expected e/E.")
self._receive_task = asyncio.ensure_future(self._receive_cr())
except:
self._writer.close()
@ -62,19 +54,19 @@ class CommMonInj:
del self._writer
def monitor_probe(self, enable, channel, probe):
packet = struct.pack(self.endian + "bblb", 0, enable, channel, probe)
packet = struct.pack("<bblb", 0, enable, channel, probe)
self._writer.write(packet)
def monitor_injection(self, enable, channel, overrd):
packet = struct.pack(self.endian + "bblb", 3, enable, channel, overrd)
packet = struct.pack("<bblb", 3, enable, channel, overrd)
self._writer.write(packet)
def inject(self, channel, override, value):
packet = struct.pack(self.endian + "blbb", 1, channel, override, value)
packet = struct.pack("<blbb", 1, channel, override, value)
self._writer.write(packet)
def get_injection_status(self, channel, override):
packet = struct.pack(self.endian + "blb", 2, channel, override)
packet = struct.pack("<blb", 2, channel, override)
self._writer.write(packet)
async def _receive_cr(self):
@ -84,14 +76,12 @@ class CommMonInj:
if not ty:
return
if ty == b"\x00":
payload = await self._reader.readexactly(9)
channel, probe, value = struct.unpack(
self.endian + "lbl", payload)
payload = await self._reader.readexactly(13)
channel, probe, value = struct.unpack("<lbq", payload)
self.monitor_cb(channel, probe, value)
elif ty == b"\x01":
payload = await self._reader.readexactly(6)
channel, override, value = struct.unpack(
self.endian + "lbb", payload)
channel, override, value = struct.unpack("<lbb", payload)
self.injection_status_cb(channel, override, value)
else:
raise ValueError("Unknown packet type", ty)
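A minimal client sketch against the updated protocol (not part of the diff; the endpoint address, channel and probe numbers are illustrative, and the endpoint may be either the core device or an aqctl_moninj_proxy instance, both serving moninj on port 1383):

import asyncio
from artiq.coredevice.comm_moninj import CommMonInj

def monitor_cb(channel, probe, value):
    print("monitor:", channel, probe, value)   # value is now a 64-bit quantity

def injection_status_cb(channel, override, value):
    print("injection status:", channel, override, value)

async def main():
    comm = CommMonInj(monitor_cb, injection_status_cb)
    await comm.connect("192.168.1.70")   # the port argument is optional, as in the proxy's own call
    comm.monitor_probe(True, 0, 0)       # channel 0, probe 0 (TTL level)
    await asyncio.sleep(5)               # let a few updates arrive
    await comm.close()

asyncio.run(main())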

View File

@ -726,7 +726,7 @@ class ExperimentManager:
else:
repo_match = "repo_rev" not in expid
if (repo_match and
expid["file"] == file and
("file" in expid and expid["file"] == file) and
expid["class_name"] == class_name):
rids.append(rid)
asyncio.ensure_future(self._request_term_multiple(rids))

View File

@ -202,51 +202,50 @@ _WidgetDesc = namedtuple("_WidgetDesc", "uid comment cls arguments")
def setup_from_ddb(ddb):
core_addr = None
mi_addr = None
dds_sysclk = None
description = set()
for k, v in ddb.items():
comment = None
if "comment" in v:
comment = v["comment"]
try:
if isinstance(v, dict) and v["type"] == "local":
if k == "core":
core_addr = v["arguments"]["host"]
elif v["module"] == "artiq.coredevice.ttl":
channel = v["arguments"]["channel"]
force_out = v["class"] == "TTLOut"
widget = _WidgetDesc(k, comment, _TTLWidget, (channel, force_out, k))
description.add(widget)
elif (v["module"] == "artiq.coredevice.ad9914"
and v["class"] == "AD9914"):
bus_channel = v["arguments"]["bus_channel"]
channel = v["arguments"]["channel"]
dds_sysclk = v["arguments"]["sysclk"]
widget = _WidgetDesc(k, comment, _DDSWidget, (bus_channel, channel, k))
description.add(widget)
elif ( (v["module"] == "artiq.coredevice.ad53xx" and v["class"] == "AD53xx")
or (v["module"] == "artiq.coredevice.zotino" and v["class"] == "Zotino")):
spi_device = v["arguments"]["spi_device"]
spi_device = ddb[spi_device]
while isinstance(spi_device, str):
spi_device = ddb[spi_device]
spi_channel = spi_device["arguments"]["channel"]
for channel in range(32):
widget = _WidgetDesc((k, channel), comment, _DACWidget, (spi_channel, channel, k))
if isinstance(v, dict):
comment = v.get("comment")
if v["type"] == "local":
if v["module"] == "artiq.coredevice.ttl":
channel = v["arguments"]["channel"]
force_out = v["class"] == "TTLOut"
widget = _WidgetDesc(k, comment, _TTLWidget, (channel, force_out, k))
description.add(widget)
elif (v["module"] == "artiq.coredevice.ad9914"
and v["class"] == "AD9914"):
bus_channel = v["arguments"]["bus_channel"]
channel = v["arguments"]["channel"]
dds_sysclk = v["arguments"]["sysclk"]
widget = _WidgetDesc(k, comment, _DDSWidget, (bus_channel, channel, k))
description.add(widget)
elif ( (v["module"] == "artiq.coredevice.ad53xx" and v["class"] == "AD53xx")
or (v["module"] == "artiq.coredevice.zotino" and v["class"] == "Zotino")):
spi_device = v["arguments"]["spi_device"]
spi_device = ddb[spi_device]
while isinstance(spi_device, str):
spi_device = ddb[spi_device]
spi_channel = spi_device["arguments"]["channel"]
for channel in range(32):
widget = _WidgetDesc((k, channel), comment, _DACWidget, (spi_channel, channel, k))
description.add(widget)
elif v["type"] == "controller" and k == "core_moninj":
mi_addr = v["host"]
except KeyError:
pass
return core_addr, dds_sysclk, description
return mi_addr, dds_sysclk, description
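What the new lookup amounts to, as a sketch (not part of the diff; the ddb fragment mirrors the core_moninj entries added to the example device databases in this commit):

ddb = {
    "core_moninj": {
        "type": "controller",
        "host": "::1",
        "port_proxy": 1383,
        "port": 1384,
        "command": "aqctl_moninj_proxy --port-proxy {port_proxy} --port-control {port} --bind {bind} 192.168.1.70",
    },
}
mi_addr, dds_sysclk, description = setup_from_ddb(ddb)
assert mi_addr == "::1"     # the dashboard now connects to the proxy, not to the core device
assert dds_sysclk is None and description == set()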
class _DeviceManager:
def __init__(self):
self.core_addr = None
self.reconnect_core = asyncio.Event()
self.core_connection = None
self.core_connector_task = asyncio.ensure_future(self.core_connector())
self.mi_addr = None
self.reconnect_mi = asyncio.Event()
self.mi_connection = None
self.mi_connector_task = asyncio.ensure_future(self.mi_connector())
self.ddb = dict()
self.description = set()
@ -265,11 +264,11 @@ class _DeviceManager:
return ddb
def notify(self, mod):
core_addr, dds_sysclk, description = setup_from_ddb(self.ddb)
mi_addr, dds_sysclk, description = setup_from_ddb(self.ddb)
if core_addr != self.core_addr:
self.core_addr = core_addr
self.reconnect_core.set()
if mi_addr != self.mi_addr:
self.mi_addr = mi_addr
self.reconnect_mi.set()
self.dds_sysclk = dds_sysclk
@ -319,44 +318,44 @@ class _DeviceManager:
self.description = description
def ttl_set_mode(self, channel, mode):
if self.core_connection is not None:
if self.mi_connection is not None:
widget = self.ttl_widgets[channel]
if mode == "0":
widget.cur_override = True
widget.cur_level = False
self.core_connection.inject(channel, TTLOverride.level.value, 0)
self.core_connection.inject(channel, TTLOverride.oe.value, 1)
self.core_connection.inject(channel, TTLOverride.en.value, 1)
self.mi_connection.inject(channel, TTLOverride.level.value, 0)
self.mi_connection.inject(channel, TTLOverride.oe.value, 1)
self.mi_connection.inject(channel, TTLOverride.en.value, 1)
elif mode == "1":
widget.cur_override = True
widget.cur_level = True
self.core_connection.inject(channel, TTLOverride.level.value, 1)
self.core_connection.inject(channel, TTLOverride.oe.value, 1)
self.core_connection.inject(channel, TTLOverride.en.value, 1)
self.mi_connection.inject(channel, TTLOverride.level.value, 1)
self.mi_connection.inject(channel, TTLOverride.oe.value, 1)
self.mi_connection.inject(channel, TTLOverride.en.value, 1)
elif mode == "exp":
widget.cur_override = False
self.core_connection.inject(channel, TTLOverride.en.value, 0)
self.mi_connection.inject(channel, TTLOverride.en.value, 0)
else:
raise ValueError
# override state may have changed
widget.refresh_display()
def setup_ttl_monitoring(self, enable, channel):
if self.core_connection is not None:
self.core_connection.monitor_probe(enable, channel, TTLProbe.level.value)
self.core_connection.monitor_probe(enable, channel, TTLProbe.oe.value)
self.core_connection.monitor_injection(enable, channel, TTLOverride.en.value)
self.core_connection.monitor_injection(enable, channel, TTLOverride.level.value)
if self.mi_connection is not None:
self.mi_connection.monitor_probe(enable, channel, TTLProbe.level.value)
self.mi_connection.monitor_probe(enable, channel, TTLProbe.oe.value)
self.mi_connection.monitor_injection(enable, channel, TTLOverride.en.value)
self.mi_connection.monitor_injection(enable, channel, TTLOverride.level.value)
if enable:
self.core_connection.get_injection_status(channel, TTLOverride.en.value)
self.mi_connection.get_injection_status(channel, TTLOverride.en.value)
def setup_dds_monitoring(self, enable, bus_channel, channel):
if self.core_connection is not None:
self.core_connection.monitor_probe(enable, bus_channel, channel)
if self.mi_connection is not None:
self.mi_connection.monitor_probe(enable, bus_channel, channel)
def setup_dac_monitoring(self, enable, spi_channel, channel):
if self.core_connection is not None:
self.core_connection.monitor_probe(enable, spi_channel, channel)
if self.mi_connection is not None:
self.mi_connection.monitor_probe(enable, spi_channel, channel)
def monitor_cb(self, channel, probe, value):
if channel in self.ttl_widgets:
@ -385,29 +384,29 @@ class _DeviceManager:
widget.refresh_display()
def disconnect_cb(self):
logger.error("lost connection to core device moninj")
self.reconnect_core.set()
logger.error("lost connection to moninj")
self.reconnect_mi.set()
async def core_connector(self):
async def mi_connector(self):
while True:
await self.reconnect_core.wait()
self.reconnect_core.clear()
if self.core_connection is not None:
await self.core_connection.close()
self.core_connection = None
new_core_connection = CommMonInj(self.monitor_cb, self.injection_status_cb,
await self.reconnect_mi.wait()
self.reconnect_mi.clear()
if self.mi_connection is not None:
await self.mi_connection.close()
self.mi_connection = None
new_mi_connection = CommMonInj(self.monitor_cb, self.injection_status_cb,
self.disconnect_cb)
try:
await new_core_connection.connect(self.core_addr, 1383)
await new_mi_connection.connect(self.mi_addr, 1383)
except asyncio.CancelledError:
logger.info("cancelled connection to core device moninj")
logger.info("cancelled connection to moninj")
break
except:
logger.error("failed to connect to core device moninj", exc_info=True)
logger.error("failed to connect to moninj", exc_info=True)
await asyncio.sleep(10.)
self.reconnect_core.set()
self.reconnect_mi.set()
else:
self.core_connection = new_core_connection
self.mi_connection = new_mi_connection
for ttl_channel in self.ttl_widgets.keys():
self.setup_ttl_monitoring(True, ttl_channel)
for bus_channel, channel in self.dds_widgets.keys():
@ -416,13 +415,13 @@ class _DeviceManager:
self.setup_dac_monitoring(True, spi_channel, channel)
async def close(self):
self.core_connector_task.cancel()
self.mi_connector_task.cancel()
try:
await asyncio.wait_for(self.core_connector_task, None)
await asyncio.wait_for(self.mi_connector_task, None)
except asyncio.CancelledError:
pass
if self.core_connection is not None:
await self.core_connection.close()
if self.mi_connection is not None:
await self.mi_connection.close()
class _MonInjDock(QtWidgets.QDockWidget):

View File

@ -48,7 +48,7 @@ class Model(DictSyncModel):
else:
return "Outside repo."
elif column == 6:
return v["expid"]["file"]
return v["expid"].get("file", "<none>")
elif column == 7:
if v["expid"]["class_name"] is None:
return ""

View File

@ -13,6 +13,13 @@ device_db = {
"port": 1068,
"command": "aqctl_corelog -p {port} --bind {bind} " + core_addr
},
"core_moninj": {
"type": "controller",
"host": "::1",
"port_proxy": 1383,
"port": 1384,
"command": "aqctl_moninj_proxy --port-proxy {port_proxy} --port-control {port} --bind {bind} " + core_addr
},
"core_cache": {
"type": "local",
"module": "artiq.coredevice.cache",

View File

@ -17,6 +17,13 @@ device_db = {
"port": 1068,
"command": "aqctl_corelog -p {port} --bind {bind} " + core_addr
},
"core_moninj": {
"type": "controller",
"host": "::1",
"port_proxy": 1383,
"port": 1384,
"command": "aqctl_moninj_proxy --port-proxy {port_proxy} --port-control {port} --bind {bind} " + core_addr
},
"core_cache": {
"type": "local",
"module": "artiq.coredevice.cache",

View File

@ -63,7 +63,10 @@ static mut API: &'static [(&'static str, *const ())] = &[
api!(__powidf2),
/* libc */
api!(memcmp, extern { fn memcmp(a: *const u8, b: *mut u8, size: usize); }),
api!(memcpy, extern { fn memcpy(dest: *mut u8, src: *const u8, n: usize) -> *mut u8; }),
api!(memmove, extern { fn memmove(dest: *mut u8, src: *const u8, n: usize) -> *mut u8; }),
api!(memset, extern { fn memset(s: *mut u8, c: i32, n: usize) -> *mut u8; }),
api!(memcmp, extern { fn memcmp(s1: *const u8, s2: *const u8, n: usize) -> i32; }),
/* libm */
// commented out functions are not available with the libm used here, but are available in NAC3.

View File

@ -64,12 +64,14 @@ pub fn config_routing_table(default_n_links: usize) -> RoutingTable {
}
}
return true;
} else {
warn!("length of the configured routing table is incorrect");
}
}
false
});
if !ok {
warn!("could not read routing table from configuration, using default");
info!("could not read routing table from configuration, using default");
}
info!("routing table: {}", ret);
ret

View File

@ -34,7 +34,7 @@ pub enum Packet {
RoutingAck,
MonitorRequest { destination: u8, channel: u16, probe: u8 },
MonitorReply { value: u32 },
MonitorReply { value: u64 },
InjectionRequest { destination: u8, channel: u16, overrd: u8, value: u8 },
InjectionStatusRequest { destination: u8, channel: u16, overrd: u8 },
InjectionStatusReply { value: u8 },
@ -105,7 +105,7 @@ impl Packet {
probe: reader.read_u8()?
},
0x41 => Packet::MonitorReply {
value: reader.read_u32()?
value: reader.read_u64()?
},
0x50 => Packet::InjectionRequest {
destination: reader.read_u8()?,
@ -259,7 +259,7 @@ impl Packet {
},
Packet::MonitorReply { value } => {
writer.write_u8(0x41)?;
writer.write_u32(value)?;
writer.write_u64(value)?;
},
Packet::InjectionRequest { destination, channel, overrd, value } => {
writer.write_u8(0x50)?;

View File

@ -40,7 +40,7 @@ pub enum HostMessage {
#[derive(Debug)]
pub enum DeviceMessage {
MonitorStatus { channel: u32, probe: u8, value: u32 },
MonitorStatus { channel: u32, probe: u8, value: u64 },
InjectionStatus { channel: u32, overrd: u8, value: u8 }
}
@ -82,7 +82,7 @@ impl DeviceMessage {
writer.write_u8(0)?;
writer.write_u32(channel)?;
writer.write_u8(probe)?;
writer.write_u32(value)?;
writer.write_u64(value)?;
},
DeviceMessage::InjectionStatus { channel, overrd, value } => {
writer.write_u8(1)?;
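For reference, a sketch of the widened device-to-host MonitorStatus frame as the host now decodes it (not part of the diff; channel, probe and value are made up, and the format strings follow comm_moninj.py above):

import struct

# type byte 0, 4-byte channel, 1-byte probe, 8-byte value; little-endian, 14 bytes in total
frame = struct.pack("<blbq", 0, 7, 0, 0x1_0000_0001)
ty, payload = frame[:1], frame[1:]
assert ty == b"\x00" and len(payload) == 13
channel, probe, value = struct.unpack("<lbq", payload)
assert (channel, probe, value) == (7, 0, 0x1_0000_0001)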

View File

@ -2,7 +2,6 @@ use alloc::collections::btree_map::BTreeMap;
use core::cell::RefCell;
use io::Error as IoError;
use io::Write;
use moninj_proto::*;
use sched::{Io, Mutex, TcpListener, TcpStream, Error as SchedError};
use urc::Urc;
@ -13,12 +12,12 @@ use board_artiq::drtio_routing;
mod local_moninj {
use board_misoc::csr;
pub fn read_probe(channel: u16, probe: u8) -> u32 {
pub fn read_probe(channel: u16, probe: u8) -> u64 {
unsafe {
csr::rtio_moninj::mon_chan_sel_write(channel as _);
csr::rtio_moninj::mon_probe_sel_write(probe);
csr::rtio_moninj::mon_value_update_write(1);
csr::rtio_moninj::mon_value_read() as u32
csr::rtio_moninj::mon_value_read() as u64
}
}
@ -41,7 +40,7 @@ mod local_moninj {
#[cfg(not(has_rtio_moninj))]
mod local_moninj {
pub fn read_probe(_channel: u16, _probe: u8) -> u32 { 0 }
pub fn read_probe(_channel: u16, _probe: u8) -> u64 { 0 }
pub fn inject(_channel: u16, _overrd: u8, _value: u8) { }
@ -54,7 +53,7 @@ mod remote_moninj {
use rtio_mgt::drtio;
use sched::{Io, Mutex};
pub fn read_probe(io: &Io, aux_mutex: &Mutex, linkno: u8, destination: u8, channel: u16, probe: u8) -> u32 {
pub fn read_probe(io: &Io, aux_mutex: &Mutex, linkno: u8, destination: u8, channel: u16, probe: u8) -> u64 {
let reply = drtio::aux_transact(io, aux_mutex, linkno, &drtioaux::Packet::MonitorRequest {
destination: destination,
channel: channel,
@ -123,7 +122,6 @@ fn connection_worker(io: &Io, _aux_mutex: &Mutex, _routing_table: &drtio_routing
let mut next_check = 0;
read_magic(&mut stream)?;
stream.write_all("e".as_bytes())?;
info!("new connection from {}", stream.remote_endpoint());
loop {

View File

@ -201,13 +201,13 @@ fn process_aux_packet(_repeaters: &mut [repeater::Repeater],
csr::rtio_moninj::mon_chan_sel_write(channel as _);
csr::rtio_moninj::mon_probe_sel_write(probe);
csr::rtio_moninj::mon_value_update_write(1);
value = csr::rtio_moninj::mon_value_read();
value = csr::rtio_moninj::mon_value_read() as u64;
}
#[cfg(not(has_rtio_moninj))]
{
value = 0;
}
let reply = drtioaux::Packet::MonitorReply { value: value as u32 };
let reply = drtioaux::Packet::MonitorReply { value: value };
drtioaux::send(0, &reply)
},
drtioaux::Packet::InjectionRequest { destination: _destination, channel, overrd, value } => {

View File

@ -0,0 +1,224 @@
#!/usr/bin/env python3
import argparse
import logging
import asyncio
import struct
from enum import Enum
from sipyco.asyncio_tools import AsyncioServer
from sipyco.pc_rpc import Server
from sipyco import common_args
from artiq.coredevice.comm_moninj import CommMonInj
logger = logging.getLogger(__name__)
class EventType(Enum):
PROBE = 0
INJECTION = 1
class MonitorMux:
def __init__(self):
self.listeners = dict()
self.comm_moninj = None
def _monitor(self, listener, event):
try:
listeners = self.listeners[event]
except KeyError:
listeners = []
self.listeners[event] = listeners
if event[0] == EventType.PROBE:
logger.debug("starting monitoring channel %d probe %d", event[1], event[2])
self.comm_moninj.monitor_probe(True, event[1], event[2])
elif event[0] == EventType.INJECTION:
logger.debug("starting monitoring channel %d injection %d", event[1], event[2])
self.comm_moninj.monitor_injection(True, event[1], event[2])
else:
raise ValueError
if listener in listeners:
logger.warning("listener trying to subscribe twice to %s", event)
else:
listeners.append(listener)
def _unmonitor(self, listener, event):
try:
listeners = self.listeners[event]
except KeyError:
listeners = []
try:
listeners.remove(listener)
except ValueError:
logger.warning("listener trying to unsubscribe from %s, but was not subscribed", event)
return
if not listeners:
del self.listeners[event]
if event[0] == EventType.PROBE:
logger.debug("stopped monitoring channel %d probe %d", event[1], event[2])
self.comm_moninj.monitor_probe(False, event[1], event[2])
elif event[0] == EventType.INJECTION:
logger.debug("stopped monitoring channel %d injection %d", event[1], event[2])
self.comm_moninj.monitor_injection(False, event[1], event[2])
else:
raise ValueError
def monitor_probe(self, listener, enable, channel, probe):
if enable:
self._monitor(listener, (EventType.PROBE, channel, probe))
else:
self._unmonitor(listener, (EventType.PROBE, channel, probe))
def monitor_injection(self, listener, enable, channel, overrd):
if enable:
self._monitor(listener, (EventType.INJECTION, channel, overrd))
else:
self._unmonitor(listener, (EventType.INJECTION, channel, overrd))
def _event_cb(self, event, value):
try:
listeners = self.listeners[event]
except KeyError:
# We may still receive buffered events shortly after an unsubscription. They can be ignored.
logger.debug("received event %s but no listener", event)
listeners = []
for listener in listeners:
if event[0] == EventType.PROBE:
listener.monitor_cb(event[1], event[2], value)
elif event[0] == EventType.INJECTION:
listener.injection_status_cb(event[1], event[2], value)
else:
raise ValueError
def monitor_cb(self, channel, probe, value):
self._event_cb((EventType.PROBE, channel, probe), value)
def injection_status_cb(self, channel, override, value):
self._event_cb((EventType.INJECTION, channel, override), value)
def remove_listener(self, listener):
for event, listeners in list(self.listeners.items()):
try:
listeners.remove(listener)
except ValueError:
pass
if not listeners:
del self.listeners[event]
if event[0] == EventType.PROBE:
logger.debug("stopped monitoring channel %d probe %d", event[1], event[2])
self.comm_moninj.monitor_probe(False, event[1], event[2])
elif event[0] == EventType.INJECTION:
logger.debug("stopped monitoring channel %d injection %d", event[1], event[2])
self.comm_moninj.monitor_injection(False, event[1], event[2])
else:
raise ValueError
class ProxyConnection:
def __init__(self, monitor_mux, reader, writer):
self.monitor_mux = monitor_mux
self.reader = reader
self.writer = writer
async def handle(self):
try:
while True:
ty = await self.reader.read(1)
if not ty:
return
if ty == b"\x00": # MonitorProbe
packet = await self.reader.readexactly(6)
enable, channel, probe = struct.unpack("<blb", packet)
self.monitor_mux.monitor_probe(self, enable, channel, probe)
elif ty == b"\x01": # Inject
packet = await self.reader.readexactly(6)
channel, overrd, value = struct.unpack("<lbb", packet)
self.monitor_mux.comm_moninj.inject(channel, overrd, value)
elif ty == b"\x02": # GetInjectionStatus
packet = await self.reader.readexactly(5)
channel, overrd = struct.unpack("<lb", packet)
self.monitor_mux.comm_moninj.get_injection_status(channel, overrd)
elif ty == b"\x03": # MonitorInjection
packet = await self.reader.readexactly(6)
enable, channel, overrd = struct.unpack("<blb", packet)
self.monitor_mux.monitor_injection(self, enable, channel, overrd)
else:
raise ValueError
finally:
self.monitor_mux.remove_listener(self)
def monitor_cb(self, channel, probe, value):
packet = struct.pack("<blbq", 0, channel, probe, value)
self.writer.write(packet)
def injection_status_cb(self, channel, override, value):
packet = struct.pack("<blbb", 1, channel, override, value)
self.writer.write(packet)
class ProxyServer(AsyncioServer):
def __init__(self, monitor_mux):
AsyncioServer.__init__(self)
self.monitor_mux = monitor_mux
async def _handle_connection_cr(self, reader, writer):
line = await reader.readline()
if line != b"ARTIQ moninj\n":
logger.error("incorrect magic")
return
await ProxyConnection(self.monitor_mux, reader, writer).handle()
def get_argparser():
parser = argparse.ArgumentParser(
description="ARTIQ moninj proxy")
common_args.verbosity_args(parser)
common_args.simple_network_args(parser, [
("proxy", "proxying", 1383),
("control", "control", 1384)
])
parser.add_argument("core_addr", metavar="CORE_ADDR",
help="hostname or IP address of the core device")
return parser
class PingTarget:
def ping(self):
return True
def main():
args = get_argparser().parse_args()
common_args.init_logger_from_args(args)
bind_address = common_args.bind_address_from_args(args)
loop = asyncio.get_event_loop()
try:
monitor_mux = MonitorMux()
comm_moninj = CommMonInj(monitor_mux.monitor_cb, monitor_mux.injection_status_cb)
monitor_mux.comm_moninj = comm_moninj
loop.run_until_complete(comm_moninj.connect(args.core_addr))
try:
proxy_server = ProxyServer(monitor_mux)
loop.run_until_complete(proxy_server.start(bind_address, args.port_proxy))
try:
server = Server({"moninj_proxy": PingTarget()}, None, True)
loop.run_until_complete(server.start(bind_address, args.port_control))
try:
loop.run_until_complete(server.wait_terminate())
finally:
loop.run_until_complete(server.stop())
finally:
loop.run_until_complete(proxy_server.stop())
finally:
loop.run_until_complete(comm_moninj.close())
finally:
loop.close()
if __name__ == "__main__":
main()
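A small sanity check against a running proxy (a sketch, not part of the diff; host and ports follow the defaults above, and sipyco.pc_rpc.Client is the regular sipyco RPC client):

from sipyco.pc_rpc import Client

# The control channel (port 1384 by default) exposes the "moninj_proxy" target
# with its ping() method; the moninj data stream itself is served on port 1383.
client = Client("::1", 1384, "moninj_proxy")
try:
    assert client.ping()
finally:
    client.close_rpc()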

View File

@ -67,6 +67,9 @@ def get_argparser():
parser_add.add_argument("-r", "--revision", default=None,
help="use a specific repository revision "
"(defaults to head, ignored without -R)")
parser_add.add_argument("--content", default=False,
action="store_true",
help="submit by content")
parser_add.add_argument("-c", "--class-name", default=None,
help="name of the class to run")
parser_add.add_argument("file", metavar="FILE",
@ -134,12 +137,18 @@ def _action_submit(remote, args):
expid = {
"log_level": logging.WARNING + args.quiet*10 - args.verbose*10,
"file": args.file,
"class_name": args.class_name,
"arguments": arguments,
}
if args.repository:
expid["repo_rev"] = args.revision
if args.content:
with open(args.file, "r") as f:
expid["content"] = f.read()
if args.repository:
raise ValueError("Repository cannot be used when submitting by content")
else:
expid["file"] = args.file
if args.repository:
expid["repo_rev"] = args.revision
if args.timed is None:
due_date = None
else:
@ -207,7 +216,7 @@ def _show_schedule(schedule):
row.append(expid["repo_rev"])
else:
row.append("Outside repo.")
row.append(expid["file"])
row.append(expid.get("file", "<none>"))
if expid["class_name"] is None:
row.append("")
else:
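The two expid shapes produced by _action_submit after this change, side by side (a sketch, not part of the diff; file name, class name and log level are illustrative):

# Regular submission: the path travels in "file", optionally with a repository revision.
expid_by_file = {
    "log_level": 30,            # logging.WARNING
    "file": "repository/flopping_f_simulation.py",
    "class_name": "FloppingF",
    "arguments": {},
    "repo_rev": None,           # only present with -R/--repository
}
# Submission with --content: the source text travels in "content", there is no "file" key,
# and combining it with --repository raises an error.
expid_by_content = {
    "log_level": 30,
    "content": open("flopping_f_simulation.py").read(),
    "class_name": "FloppingF",
    "arguments": {},
}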

View File

@ -38,6 +38,13 @@ def process_header(output, description):
"port": 1068,
"command": "aqctl_corelog -p {{port}} --bind {{bind}} " + core_addr
}},
"core_moninj": {{
"type": "controller",
"host": "::1",
"port_proxy": 1383,
"port": 1384,
"command": "aqctl_moninj_proxy --port-proxy {{port_proxy}} --port-control {{port}} --bind {{bind}} " + core_addr
}},
"core_cache": {{
"type": "local",
"module": "artiq.coredevice.cache",

View File

@ -74,7 +74,7 @@ class Worker:
return None
def _get_log_source(self):
return "worker({},{})".format(self.rid, self.filename)
return "worker({},{})".format(self.rid, self.filename if self.filename is not None else "<none>")
async def _create_process(self, log_level):
if self.ipc is not None:
@ -260,7 +260,8 @@ class Worker:
async def build(self, rid, pipeline_name, wd, expid, priority,
timeout=15.0):
self.rid = rid
self.filename = os.path.basename(expid["file"])
if "file" in expid:
self.filename = os.path.basename(expid["file"])
await self._create_process(expid["log_level"])
await self._worker_action(
{"action": "build",

View File

@ -13,6 +13,8 @@ import inspect
import logging
import traceback
from collections import OrderedDict
import importlib.util
import linecache
import h5py
@ -129,11 +131,39 @@ class CCB:
issue = staticmethod(make_parent_action("ccb_issue"))
def get_experiment(file, class_name):
def get_experiment_from_file(file, class_name):
module = tools.file_import(file, prefix="artiq_worker_")
return tools.get_experiment(module, class_name)
class StringLoader:
def __init__(self, fake_filename, content):
self.fake_filename = fake_filename
self.content = content
def get_source(self, fullname):
return self.content
def create_module(self, spec):
return None
def exec_module(self, module):
code = compile(self.get_source(self.fake_filename), self.fake_filename, "exec")
exec(code, module.__dict__)
def get_experiment_from_content(content, class_name):
fake_filename = "expcontent"
spec = importlib.util.spec_from_loader(
"expmodule",
StringLoader(fake_filename, content)
)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
linecache.lazycache(fake_filename, module.__dict__)
return tools.get_experiment(module, class_name)
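Inside the worker this resolves an experiment class without any file on disk; a sketch of the call (not part of the diff; the experiment source is illustrative and normally arrives in expid["content"] from artiq_client --content):

content = """
from artiq.experiment import EnvExperiment

class Hello(EnvExperiment):
    def run(self):
        print("hello")
"""
# Tracebacks still show the source lines thanks to the linecache registration above.
Hello = get_experiment_from_content(content, "Hello")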
register_experiment = make_parent_action("register_experiment")
@ -246,14 +276,17 @@ def main():
start_time = time.time()
rid = obj["rid"]
expid = obj["expid"]
if obj["wd"] is not None:
# Using repository
experiment_file = os.path.join(obj["wd"], expid["file"])
repository_path = obj["wd"]
if "file" in expid:
if obj["wd"] is not None:
# Using repository
experiment_file = os.path.join(obj["wd"], expid["file"])
repository_path = obj["wd"]
else:
experiment_file = expid["file"]
repository_path = None
exp = get_experiment_from_file(experiment_file, expid["class_name"])
else:
experiment_file = expid["file"]
repository_path = None
exp = get_experiment(experiment_file, expid["class_name"])
exp = get_experiment_from_content(expid["content"], expid["class_name"])
device_mgr.virtual_devices["scheduler"].set_run_info(
rid, obj["pipeline_name"], expid, obj["priority"])
start_local_time = time.localtime(start_time)

View File

@ -9,7 +9,7 @@ class TestFrontends(unittest.TestCase):
"""Test --help as a simple smoke test against catastrophic breakage."""
commands = {
"aqctl": [
"corelog"
"corelog", "moninj_proxy"
],
"artiq": [
"client", "compile", "coreanalyzer", "coremgmt",

View File

@ -10,7 +10,9 @@ Default network ports
+---------------------------------+--------------+
| Core device (analyzer) | 1382 |
+---------------------------------+--------------+
| Core device (mon/inj) | 1383 |
| Moninj (core device or proxy) | 1383 |
+---------------------------------+--------------+
| Moninj (proxy control) | 1384 |
+---------------------------------+--------------+
| Master (logging input) | 1066 |
+---------------------------------+--------------+

View File

@ -109,6 +109,14 @@ Core device logging controller
:ref: artiq.frontend.aqctl_corelog.get_argparser
:prog: aqctl_corelog
Moninj proxy
------------
.. argparse::
:ref: artiq.frontend.aqctl_moninj_proxy.get_argparser
:prog: aqctl_moninj_proxy
.. _core-device-rtio-analyzer-tool:
Core device RTIO analyzer tool

View File

@ -168,7 +168,7 @@
vivado = pkgs.buildFHSUserEnv {
name = "vivado";
targetPkgs = vivadoDeps;
profile = "source /opt/Xilinx/Vivado/2021.1/settings64.sh";
profile = "set -e; source /opt/Xilinx/Vivado/2021.2/settings64.sh";
runScript = "vivado";
};

View File

@ -35,6 +35,7 @@ console_scripts = [
"artiq_run = artiq.frontend.artiq_run:main",
"artiq_flash = artiq.frontend.artiq_flash:main",
"aqctl_corelog = artiq.frontend.aqctl_corelog:main",
"aqctl_moninj_proxy = artiq.frontend.aqctl_moninj_proxy:main",
"afws_client = artiq.frontend.afws_client:main",
]