refactor ddb/pdb/rdb

pull/109/head
Sebastien Bourdeauducq 2015-07-13 22:08:20 +02:00
parent 8b02b58a77
commit 32d141f5ac
42 changed files with 650 additions and 779 deletions

View File

@ -1,6 +1,5 @@
from operator import itemgetter
from artiq.language.db import AutoDB
from artiq.language.units import ms
from artiq.coredevice.runtime import LinkInterface
@ -13,7 +12,10 @@ class _RuntimeEnvironment(LinkInterface):
return str(self.llvm_module)
class Comm(AutoDB):
class Comm:
def __init__(self, dmgr):
pass
def get_runtime_env(self):
return _RuntimeEnvironment()

View File

@ -3,16 +3,15 @@ import serial
import struct
from artiq.coredevice.comm_generic import CommGeneric
from artiq.language.db import *
logger = logging.getLogger(__name__)
class Comm(CommGeneric, AutoDB):
class DBKeys:
serial_dev = Argument()
baud_rate = Argument(115200)
class Comm(CommGeneric):
def __init__(self, dmgr, serial_dev, baud_rate=115200):
self.serial_dev = serial_dev
self.baud_rate = baud_rate
def open(self):
if hasattr(self, "port"):

View File

@ -2,16 +2,15 @@ import logging
import socket
from artiq.coredevice.comm_generic import CommGeneric
from artiq.language.db import *
logger = logging.getLogger(__name__)
class Comm(CommGeneric, AutoDB):
class DBKeys:
host = Argument()
port = Argument(1381)
class Comm(CommGeneric):
def __init__(self, dmgr, host, port=1381):
self.host = host
self.port = port
def open(self):
if hasattr(self, "socket"):

View File

@ -1,7 +1,6 @@
import os
from artiq.language.core import *
from artiq.language.db import *
from artiq.language.units import ns
from artiq.transforms.inline import inline
@ -46,13 +45,12 @@ def _no_debug_unparse(label, node):
pass
class Core(AutoDB):
class DBKeys:
comm = Device()
ref_period = Argument(8*ns)
external_clock = Argument(False)
class Core:
def __init__(self, dmgr, ref_period=8*ns, external_clock=False):
self.comm = dmgr.get("comm")
self.ref_period = ref_period
self.external_clock = external_clock
def build(self):
self.first_run = True
self.core = self
self.comm.core = self

View File

@ -1,5 +1,4 @@
from artiq.language.core import *
from artiq.language.db import *
from artiq.language.units import *
@ -23,14 +22,12 @@ class _BatchContextManager:
self.dds_bus.batch_exit()
class DDSBus(AutoDB):
class DDSBus:
"""Core device Direct Digital Synthesis (DDS) bus batching driver.
Manages batching of DDS commands on a DDS shared bus."""
class DBKeys:
core = Device()
def build(self):
def __init__(self, dmgr):
self.core = dmgr.get("core")
self.batch = _BatchContextManager(self)
@kernel
@ -46,7 +43,7 @@ class DDSBus(AutoDB):
syscall("dds_batch_exit")
class _DDSGeneric(AutoDB):
class _DDSGeneric:
"""Core device Direct Digital Synthesis (DDS) driver.
Controls one DDS channel managed directly by the core device's runtime.
@ -57,12 +54,10 @@ class _DDSGeneric(AutoDB):
:param sysclk: DDS system frequency.
:param channel: channel number of the DDS device to control.
"""
class DBKeys:
core = Device()
sysclk = Argument()
channel = Argument()
def build(self):
def __init__(self, dmgr, sysclk, channel):
self.core = dmgr.get("core")
self.sysclk = sysclk
self.channel = channel
self.phase_mode = PHASE_MODE_CONTINUOUS
@portable

View File

@ -1,8 +1,7 @@
from artiq.language.core import *
from artiq.language.db import *
class TTLOut(AutoDB):
class TTLOut:
"""RTIO TTL output driver.
This should be used with output-only channels.
@ -10,12 +9,10 @@ class TTLOut(AutoDB):
:param core: core device
:param channel: channel number
"""
class DBKeys:
core = Device()
channel = Argument()
def __init__(self, dmgr, channel):
self.core = dmgr.get("core")
self.channel = channel
def build(self):
# in RTIO cycles
self.o_previous_timestamp = int64(0)
@ -58,7 +55,7 @@ class TTLOut(AutoDB):
self.off()
class TTLInOut(AutoDB):
class TTLInOut:
"""RTIO TTL input/output driver.
In output mode, provides functions to set the logic level on the signal.
@ -76,11 +73,10 @@ class TTLInOut(AutoDB):
:param core: core device
:param channel: channel number
"""
class DBKeys:
core = Device()
channel = Argument()
def __init__(self, dmgr, channel):
self.core = dmgr.get("core")
self.channel = channel
def build(self):
# in RTIO cycles
self.o_previous_timestamp = int64(0)
self.i_previous_timestamp = int64(0)
@ -208,7 +204,7 @@ class TTLInOut(AutoDB):
return syscall("ttl_get", self.channel, self.i_previous_timestamp)
class TTLClockGen(AutoDB):
class TTLClockGen:
"""RTIO TTL clock generator driver.
This should be used with TTL channels that have a clock generator
@ -217,9 +213,9 @@ class TTLClockGen(AutoDB):
:param core: core device
:param channel: channel number
"""
class DBKeys:
core = Device()
channel = Argument()
def __init__(self, dmgr, channel):
self.core = dmgr.get("core")
self.channel = channel
def build(self):
# in RTIO cycles

View File

@ -1,5 +1,4 @@
from artiq.language.core import *
from artiq.language.db import *
from artiq.language.units import *
@ -154,19 +153,14 @@ class _Frame:
self.pdq.next_segment = -1
class CompoundPDQ2(AutoDB):
class DBKeys:
core = Device()
pdq2_devices = Argument()
trigger_device = Argument()
frame_devices = Argument()
def build(self):
self.pdq2s = [self.dbh.get_device(d) for d in self.pdq2_devices]
self.trigger = self.dbh.get_device(self.trigger_device)
self.frame0 = self.dbh.get_device(self.frame_devices[0])
self.frame1 = self.dbh.get_device(self.frame_devices[1])
self.frame2 = self.dbh.get_device(self.frame_devices[2])
class CompoundPDQ2:
def __init__(self, dmgr, pdq2_devices, trigger_device, frame_devices):
self.core = dmgr.get("core")
self.pdq2s = [dmgr.get(d) for d in self.pdq2_devices]
self.trigger = dmgr.get(trigger_device)
self.frame0 = dmgr.get(frame_devices[0])
self.frame1 = dmgr.get(frame_devices[1])
self.frame2 = dmgr.get(frame_devices[2])
self.frames = []
self.current_frame = -1

View File

@ -1,7 +1,6 @@
import numpy as np
from artiq.language.core import *
from artiq.language.db import *
from artiq.language.units import *
from artiq.wavesynth.compute_samples import Synthesizer
@ -139,20 +138,16 @@ class _Frame:
self.daqmx.next_segment = -1
class CompoundDAQmx(AutoDB):
class DBKeys:
core = Device()
daqmx_device = Argument()
clock_device = Argument()
channel_count = Argument()
sample_rate = Argument()
sample_rate_in_mu = Argument(False)
def build(self):
self.daqmx = self.dbh.get_device(self.daqmx_device)
self.clock = self.dbh.get_device(self.clock_device)
if not self.sample_rate_in_mu:
class CompoundDAQmx:
def __init__(self, dmgr, daqmx_device, clock_device, channel_count,
sample_rate, sample_rate_in_mu=False):
self.core = dmgr.get("core")
self.daqmx = dmgr.get(daqmx_device)
self.clock = dmgr.get(clock_device)
self.channel_count = channel_count
if self.sample_rate_in_mu:
self.sample_rate = sample_rate
else:
self.sample_rate = self.clock.frequency_to_ftw(sample_rate)
self.frame = None

View File

@ -4,7 +4,7 @@ import logging
import argparse
from artiq.protocols.file_db import FlatFileDB
from artiq.master.worker_db import DBHub
from artiq.master.worker_db import DeviceManager
from artiq.tools import *
@ -36,15 +36,14 @@ def main():
args = get_argparser().parse_args()
init_logger(args)
ddb = FlatFileDB(args.ddb)
dmgr = DeviceManager(FlatFileDB(args.ddb))
pdb = FlatFileDB(args.pdb)
dbh = DBHub(ddb, pdb, rdb=None, read_only=True)
try:
module = file_import(args.file)
exp = get_experiment(module, args.experiment)
arguments = parse_arguments(args.arguments)
exp_inst = exp(dbh, **arguments)
exp_inst = exp(dmgr, pdb, **arguments)
if (not hasattr(exp.run, "k_function_info")
or not exp.run.k_function_info):
@ -56,7 +55,7 @@ def main():
[exp_inst], {},
with_attr_writeback=False)
finally:
dbh.close_devices()
dmgr.close_devices()
if rpc_map:
raise ValueError("Experiment must not use RPC")

View File

@ -18,9 +18,11 @@ from artiq.gui.parameters import ParametersDock
from artiq.gui.schedule import ScheduleDock
from artiq.gui.log import LogDock
data_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)),
"..", "gui")
def get_argparser():
parser = argparse.ArgumentParser(description="ARTIQ GUI client")
parser.add_argument(

View File

@ -6,10 +6,10 @@ import atexit
import os
from artiq.protocols.pc_rpc import Server
from artiq.protocols.sync_struct import Publisher
from artiq.protocols.sync_struct import Notifier, Publisher, process_mod
from artiq.protocols.file_db import FlatFileDB
from artiq.master.scheduler import Scheduler
from artiq.master.results import RTResults, get_last_rid
from artiq.master.worker_db import get_last_rid
from artiq.master.repository import Repository
from artiq.tools import verbosity_args, init_logger
@ -36,7 +36,7 @@ def main():
init_logger(args)
ddb = FlatFileDB("ddb.pyon")
pdb = FlatFileDB("pdb.pyon")
rtr = RTResults()
rtr = Notifier(dict())
repository = Repository()
if os.name == "nt":
@ -47,11 +47,10 @@ def main():
atexit.register(lambda: loop.close())
worker_handlers = {
"req_device": ddb.request,
"req_parameter": pdb.request,
"get_device": ddb.get,
"get_parameter": pdb.get,
"set_parameter": pdb.set,
"init_rt_results": rtr.init,
"update_rt_results": rtr.update,
"update_rt_results": lambda mod: process_mod(rtr, mod),
}
scheduler = Scheduler(get_last_rid() + 1, worker_handlers)
worker_handlers["scheduler_submit"] = scheduler.submit
@ -72,7 +71,7 @@ def main():
"schedule": scheduler.notifier,
"devices": ddb.data,
"parameters": pdb.data,
"rt_results": rtr.groups,
"rt_results": rtr,
"explist": repository.explist
})
loop.run_until_complete(server_notify.start(

View File

@ -9,20 +9,19 @@ import logging
import h5py
from artiq.language.db import *
from artiq.language.experiment import Experiment
from artiq.language.environment import EnvExperiment
from artiq.protocols.file_db import FlatFileDB
from artiq.master.worker_db import DBHub, ResultDB
from artiq.master.worker_db import DeviceManager, ResultDB
from artiq.tools import *
logger = logging.getLogger(__name__)
class ELFRunner(Experiment, AutoDB):
class DBKeys:
core = Device()
file = Argument()
class ELFRunner(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_argument("file")
def run(self):
with open(self.file, "rb") as f:
@ -37,12 +36,11 @@ class SimpleParamLogger:
class DummyScheduler:
def __init__(self, expid):
def __init__(self):
self.next_rid = 0
self.next_trid = 0
self.pipeline_name = "main"
self.priority = 0
self.expid = expid
self.expid = None
def submit(self, pipeline_name, expid, priority, due_date, flush):
rid = self.next_rid
@ -78,7 +76,7 @@ def get_argparser(with_file=True):
return parser
def _build_experiment(dbh, args):
def _build_experiment(dmgr, pdb, rdb, args):
if hasattr(args, "file"):
if args.file.endswith(".elf"):
if args.arguments:
@ -86,7 +84,7 @@ def _build_experiment(dbh, args):
if args.experiment:
raise ValueError("experiment-by-name not supported "
"for ELF kernels")
return ELFRunner(dbh, file=args.file)
return ELFRunner(dmgr, pdb, rdb, file=args.file)
else:
module = file_import(args.file)
file = args.file
@ -100,34 +98,33 @@ def _build_experiment(dbh, args):
"experiment": args.experiment,
"arguments": arguments
}
return exp(dbh,
scheduler=DummyScheduler(expid),
**arguments)
dmgr.virtual_devices["scheduler"].expid = expid
return exp(dmgr, pdb, rdb, **arguments)
def run(with_file=False):
args = get_argparser(with_file).parse_args()
init_logger(args)
ddb = FlatFileDB(args.ddb)
dmgr = DeviceManager(FlatFileDB(args.ddb),
virtual_devices={"scheduler": DummyScheduler()})
pdb = FlatFileDB(args.pdb)
pdb.hooks.append(SimpleParamLogger())
rdb = ResultDB(lambda description: None, lambda mod: None)
dbh = DBHub(ddb, pdb, rdb)
rdb = ResultDB()
try:
exp_inst = _build_experiment(dbh, args)
rdb.build()
exp_inst = _build_experiment(dmgr, pdb, rdb, args)
exp_inst.prepare()
exp_inst.run()
exp_inst.analyze()
finally:
dbh.close_devices()
dmgr.close_devices()
if args.hdf5 is not None:
with h5py.File(args.hdf5, "w") as f:
rdb.write_hdf5(f)
elif rdb.data.read or rdb.realtime_data.read:
r = chain(rdb.realtime_data.read.items(), rdb.data.read.items())
elif rdb.rt.read or rdb.nrt:
r = chain(rdb.rt.read.items(), rdb.nrt.items())
for k, v in sorted(r, key=itemgetter(0)):
print("{}: {}".format(k, v))

View File

@ -1,11 +1,10 @@
from artiq.language import core, experiment, db, units
from artiq.language import core, environment, units
from artiq.language.core import *
from artiq.language.experiment import *
from artiq.language.db import *
from artiq.language.environment import *
from artiq.language.units import *
__all__ = []
__all__.extend(core.__all__)
__all__.extend(experiment.__all__)
__all__.extend(db.__all__)
__all__.extend(environment.__all__)
__all__.extend(units.__all__)

View File

@ -1,133 +0,0 @@
"""
Connection to device, parameter and result database.
"""
__all__ = ["Device", "NoDefault", "Parameter", "Argument", "Result", "AutoDB"]
class _AttributeKind:
pass
class Device(_AttributeKind):
"""Represents a device for ``AutoDB`` to process."""
pass
class NoDefault:
"""Represents the absence of a default value for ``Parameter``
and ``Argument``.
"""
pass
class Parameter(_AttributeKind):
"""Represents a parameter (from the database) for ``AutoDB``
to process.
:param default: Default value of the parameter to be used if not found
in the database.
"""
def __init__(self, default=NoDefault):
self.default = default
class Argument(_AttributeKind):
"""Represents an argument (specifiable at instance creation) for
``AutoDB`` to process.
:param default: Default value of the argument to be used if not specified
at instance creation.
"""
def __init__(self, default=NoDefault):
self.default = default
class Result(_AttributeKind):
"""Represents a result for ``AutoDB`` to process."""
pass
class AutoDB:
"""Base class to automate device, parameter and result database access.
Drivers and experiments should in most cases overload this class to
obtain the parameters and devices (including the core device) that they
need, report results, and modify parameters.
:param dbh: database hub to use. If ``None``, all devices and parameters
must be supplied as keyword arguments, and reporting results and
modifying parameters is not supported.
"""
class DBKeys:
pass
realtime_results = dict()
def __init__(self, dbh=None, **kwargs):
self.dbh = dbh
for k, v in kwargs.items():
object.__setattr__(self, k, v)
for k in dir(self.DBKeys):
if k not in self.__dict__:
ak = getattr(self.DBKeys, k)
if isinstance(ak, Argument):
if ak.default is NoDefault:
raise AttributeError(
"No value specified for argument '{}'".format(k))
object.__setattr__(self, k, ak.default)
elif isinstance(ak, Device):
try:
dev = self.dbh.get_device(k)
except KeyError:
raise KeyError("Device '{}' not found".format(k))
object.__setattr__(self, k, dev)
self.build()
if self.dbh is not None and self.realtime_results:
self.dbh.add_rt_results(self.realtime_results)
def __getattr__(self, name):
ak = getattr(self.DBKeys, name)
if isinstance(ak, Parameter):
try:
if self.dbh is None:
raise KeyError
return self.dbh.get_parameter(name)
except KeyError:
if ak.default is not NoDefault:
return ak.default
else:
raise AttributeError("Parameter '{}' not in database"
" and without default value"
.format(name))
elif isinstance(ak, Result):
try:
return self.dbh.get_result(name)
except KeyError:
raise AttributeError("Result '{}' not found".format(name))
else:
raise ValueError
def __setattr__(self, name, value):
try:
ak = getattr(self.DBKeys, name)
except AttributeError:
object.__setattr__(self, name, value)
else:
if isinstance(ak, Parameter):
self.dbh.set_parameter(name, value)
elif isinstance(ak, Result):
self.dbh.set_result(name, value)
else:
raise ValueError
def build(self):
"""This is called by ``__init__`` after the parameter initialization
is done.
The user may overload this method to complete the object's
initialization with all parameters available.
"""
pass
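For reference, the removed AutoDB mechanism documented above was typically used as in the following sketch (the driver, device and parameter names are hypothetical)::

    class MyOldDriver(AutoDB):
        class DBKeys:
            core = Device()          # looked up in the device database by attribute name
            channel = Argument()     # must be supplied at instance creation
            gain = Parameter(1.0)    # read from the parameter database, default 1.0

        def build(self):
            # called by AutoDB.__init__ once devices and arguments are bound
            self.scaled_gain = 2*self.gain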

View File

@ -0,0 +1,182 @@
from inspect import isclass
__all__ = ["NoDefault", "FreeValue", "HasEnvironment",
"Experiment", "EnvExperiment", "is_experiment"]
class NoDefault:
"""Represents the absence of a default value."""
pass
class FreeValue:
def __init__(self, default=NoDefault):
if default is not NoDefault:
self.default_value = default
def default(self):
return self.default_value
def process(self, x):
return x
def describe(self):
d = {"ty": "FreeValue"}
if hasattr(self, "default_value"):
d["default"] = self.default_value
return d
class HasEnvironment:
"""Provides methods to manage the environment of an experiment (devices,
parameters, results, arguments)."""
def __init__(self, dmgr=None, pdb=None, rdb=None, *,
param_override=dict(), **kwargs):
self.requested_args = dict()
self.__dmgr = dmgr
self.__pdb = pdb
self.__rdb = rdb
self.__param_override = param_override
self.__kwargs = kwargs
self.__in_build = True
self.build()
self.__in_build = False
for key in self.__kwargs.keys():
if key not in self.requested_args:
raise TypeError("Got unexpected argument: " + key)
del self.__kwargs
def build(self):
raise NotImplementedError
def dbs(self):
return self.__dmgr, self.__pdb, self.__rdb
def get_argument(self, key, processor=None):
if not self.__in_build:
raise TypeError("get_argument() should only "
"be called from build()")
if processor is None:
processor = FreeValue()
self.requested_args[key] = processor
try:
argval = self.__kwargs[key]
except KeyError:
return processor.default()
return processor.process(argval)
def attr_argument(self, key, processor=None):
setattr(self, key, self.get_argument(key, processor))
def get_device(self, key):
if self.__dmgr is None:
raise ValueError("Device manager not present")
return self.__dmgr.get(key)
def attr_device(self, key):
setattr(self, key, self.get_device(key))
def get_parameter(self, key, default=NoDefault):
if self.__pdb is None:
raise ValueError("Parameter database not present")
if key in self.__param_override:
return self.__param_override[key]
try:
return self.__pdb.get(key)
except KeyError:
if default is not NoDefault:
return default
else:
raise
def attr_parameter(self, key, default=NoDefault):
setattr(self, key, self.get_parameter(key, default))
def set_parameter(self, key, value):
if self.__pdb is None:
raise ValueError("Parameter database not present")
self.__pdb.set(key, value)
def set_result(self, key, value, realtime=False):
if self.__rdb is None:
raise ValueError("Result database not present")
if realtime:
if key in self.__rdb.nrt:
raise ValueError("Result is already non-realtime")
self.__rdb.rt[key] = value
notifier = self.__rdb.rt[key]
notifier.kernel_attr_init = False
return notifier
else:
if key in self.__rdb.rt.read:
raise ValueError("Result is already realtime")
self.__rdb.nrt[key] = value
def attr_rtresult(self, key, init_value):
setattr(self, key, set_result(key, init_value, True))
def get_result(self, key):
if self.__rdb is None:
raise ValueError("Result database not present")
return self.__rdb.get(key)
class Experiment:
"""Base class for experiments.
Deriving from this class enables automatic experiment discovery in
Python modules.
"""
def prepare(self):
"""Entry point for pre-computing data necessary for running the
experiment.
Doing such computations outside of ``run`` enables more efficient
scheduling of multiple experiments that need to access the shared
hardware during part of their execution.
This method must not interact with the hardware.
"""
pass
def run(self):
"""The main entry point of the experiment.
This method must be overloaded by the user to implement the main
control flow of the experiment.
This method may interact with the hardware.
The experiment may call the scheduler's ``pause`` method while in
``run``.
"""
raise NotImplementedError
def analyze(self):
"""Entry point for analyzing the results of the experiment.
This method may be overloaded by the user to implement the analysis
phase of the experiment, for example fitting curves.
Splitting this phase from ``run`` enables tweaking the analysis
algorithm on pre-existing data, and CPU-bound analyses to be run
overlapped with the next experiment in a pipelined manner.
This method must not interact with the hardware.
"""
pass
class EnvExperiment(Experiment, HasEnvironment):
pass
def is_experiment(o):
"""Checks if a Python object is an instantiable user experiment."""
return (isclass(o)
and issubclass(o, Experiment)
and o is not Experiment
and o is not EnvExperiment)
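A minimal sketch of how the new HasEnvironment/EnvExperiment API introduced above is meant to be used (device, argument and parameter names below are illustrative; instances are built as ``MyExperiment(dmgr, pdb, rdb, ...)`` by the front-end tools)::

    class MyExperiment(EnvExperiment):
        def build(self):
            # get_argument()/attr_argument() may only be called from build()
            self.attr_device("core")                       # device from the device manager
            self.attr_argument("npoints", FreeValue(100))  # argument with a default value
            self.attr_parameter("detuning", 0.0)           # parameter from pdb, with default

        def run(self):
            data = [n*self.detuning for n in range(self.npoints)]
            self.set_result("scan_data", data)             # stored as a non-realtime result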

View File

@ -1,54 +0,0 @@
from inspect import isclass
__all__ = ["Experiment", "is_experiment"]
class Experiment:
"""Base class for experiments.
Deriving from this class enables automatic experiment discovery in
Python modules.
"""
def prepare(self):
"""Entry point for pre-computing data necessary for running the
experiment.
Doing such computations outside of ``run`` enables more efficient
scheduling of multiple experiments that need to access the shared
hardware during part of their execution.
This method must not interact with the hardware.
"""
pass
def run(self):
"""The main entry point of the experiment.
This method must be overloaded by the user to implement the main
control flow of the experiment.
This method may interact with the hardware.
The experiment may call the scheduler's ``pause`` method while in
``run``.
"""
raise NotImplementedError
def analyze(self):
"""Entry point for analyzing the results of the experiment.
This method may be overloaded by the user to implement the analysis
phase of the experiment, for example fitting curves.
Splitting this phase from ``run`` enables tweaking the analysis
algorithm on pre-existing data, and CPU-bound analyses to be run
overlapped with the next experiment in a pipelined manner.
This method must not interact with the hardware.
"""
pass
def is_experiment(o):
"""Checks if a Python object is an instantiable experiment."""
return isclass(o) and issubclass(o, Experiment) and o is not Experiment

View File

@ -2,7 +2,7 @@ import os
from artiq.protocols.sync_struct import Notifier
from artiq.tools import file_import
from artiq.language.experiment import is_experiment
from artiq.language.environment import is_experiment
def scan_experiments():
@ -23,8 +23,7 @@ def scan_experiments():
name = name[:-1]
entry = {
"file": os.path.join("repository", f),
"experiment": k,
"gui_file": getattr(v, "__artiq_gui_file__", None)
"experiment": k
}
r[name] = entry
return r

View File

@ -1,103 +0,0 @@
import os
import time
import re
import numpy
import h5py
from artiq.protocols.sync_struct import Notifier, process_mod
def get_hdf5_output(start_time, rid, name):
dirname = os.path.join("results",
time.strftime("%Y-%m-%d", start_time),
time.strftime("%H-%M", start_time))
filename = "{:09}-{}.h5".format(rid, name)
os.makedirs(dirname, exist_ok=True)
return h5py.File(os.path.join(dirname, filename), "w")
def get_last_rid():
r = -1
try:
day_folders = os.listdir("results")
except:
return r
day_folders = filter(lambda x: re.fullmatch('\d\d\d\d-\d\d-\d\d', x),
day_folders)
for df in day_folders:
day_path = os.path.join("results", df)
try:
minute_folders = os.listdir(day_path)
except:
continue
minute_folders = filter(lambda x: re.fullmatch('\d\d-\d\d', x),
minute_folders)
for mf in minute_folders:
minute_path = os.path.join(day_path, mf)
try:
h5files = os.listdir(minute_path)
except:
continue
for x in h5files:
m = re.fullmatch('(\d\d\d\d\d\d\d\d\d)-.*\.h5', x)
rid = int(m.group(1))
if rid > r:
r = rid
return r
_type_to_hdf5 = {
int: h5py.h5t.STD_I64BE,
float: h5py.h5t.IEEE_F64BE
}
def result_dict_to_hdf5(f, rd):
for name, data in rd.items():
if isinstance(data, list):
el_ty = type(data[0])
for d in data:
if type(d) != el_ty:
raise TypeError("All list elements must have the same"
" type for HDF5 output")
try:
el_ty_h5 = _type_to_hdf5[el_ty]
except KeyError:
raise TypeError("List element type {} is not supported for"
" HDF5 output".format(el_ty))
dataset = f.create_dataset(name, (len(data), ), el_ty_h5)
dataset[:] = data
elif isinstance(data, numpy.ndarray):
f.create_dataset(name, data=data)
else:
ty = type(data)
try:
ty_h5 = _type_to_hdf5[ty]
except KeyError:
raise TypeError("Type {} is not supported for HDF5 output"
.format(ty))
dataset = f.create_dataset(name, (), ty_h5)
dataset[()] = data
class RTResults:
def __init__(self):
self.groups = Notifier(dict())
self.current_group = "default"
def init(self, description):
data = dict()
for rtr in description.keys():
if isinstance(rtr, tuple):
for e in rtr:
data[e] = []
else:
data[rtr] = []
self.groups[self.current_group] = {
"description": description,
"data": data
}
def update(self, mod):
target = self.groups[self.current_group]["data"]
process_mod(target, mod)

View File

@ -1,76 +1,114 @@
from collections import OrderedDict
import importlib
import logging
import os
import time
import re
import numpy
import h5py
from artiq.protocols.sync_struct import Notifier
from artiq.protocols.pc_rpc import Client, BestEffortClient
from artiq.master.results import result_dict_to_hdf5
logger = logging.getLogger(__name__)
class ResultDB:
def __init__(self, init_rt_results, update_rt_results):
self.init_rt_results = init_rt_results
self.update_rt_results = update_rt_results
self.rtr_description = dict()
def get_hdf5_output(start_time, rid, name):
dirname = os.path.join("results",
time.strftime("%Y-%m-%d", start_time),
time.strftime("%H-%M", start_time))
filename = "{:09}-{}.h5".format(rid, name)
os.makedirs(dirname, exist_ok=True)
return h5py.File(os.path.join(dirname, filename), "w")
def add_rt_results(self, rtr_description):
intr = set(self.rtr_description.keys()).intersection(
set(rtr_description.keys()))
if intr:
raise ValueError("Duplicate realtime results: " + ", ".join(intr))
self.rtr_description.update(rtr_description)
def build(self):
realtime_results_set = set()
for rtr in self.rtr_description.keys():
if isinstance(rtr, tuple):
for e in rtr:
realtime_results_set.add(e)
else:
realtime_results_set.add(rtr)
self.realtime_data = Notifier({x: [] for x in realtime_results_set})
self.data = Notifier(dict())
self.init_rt_results(self.rtr_description)
self.realtime_data.publish = lambda notifier, data: \
self.update_rt_results(data)
def _request(self, name):
try:
return self.realtime_data[name]
except KeyError:
try:
return self.data[name]
except KeyError:
self.data[name] = []
return self.data[name]
def request(self, name):
r = self._request(name)
r.kernel_attr_init = False
def get_last_rid():
r = -1
try:
day_folders = os.listdir("results")
except:
return r
day_folders = filter(lambda x: re.fullmatch('\d\d\d\d-\d\d-\d\d', x),
day_folders)
for df in day_folders:
day_path = os.path.join("results", df)
try:
minute_folders = os.listdir(day_path)
except:
continue
minute_folders = filter(lambda x: re.fullmatch('\d\d-\d\d', x),
minute_folders)
for mf in minute_folders:
minute_path = os.path.join(day_path, mf)
try:
h5files = os.listdir(minute_path)
except:
continue
for x in h5files:
m = re.fullmatch('(\d\d\d\d\d\d\d\d\d)-.*\.h5', x)
rid = int(m.group(1))
if rid > r:
r = rid
return r
def set(self, name, value):
if name in self.realtime_data.read:
self.realtime_data[name] = value
_type_to_hdf5 = {
int: h5py.h5t.STD_I64BE,
float: h5py.h5t.IEEE_F64BE
}
def result_dict_to_hdf5(f, rd):
for name, data in rd.items():
if isinstance(data, list):
el_ty = type(data[0])
for d in data:
if type(d) != el_ty:
raise TypeError("All list elements must have the same"
" type for HDF5 output")
try:
el_ty_h5 = _type_to_hdf5[el_ty]
except KeyError:
raise TypeError("List element type {} is not supported for"
" HDF5 output".format(el_ty))
dataset = f.create_dataset(name, (len(data), ), el_ty_h5)
dataset[:] = data
elif isinstance(data, numpy.ndarray):
f.create_dataset(name, data=data)
else:
self.data[name] = value
ty = type(data)
try:
ty_h5 = _type_to_hdf5[ty]
except KeyError:
raise TypeError("Type {} is not supported for HDF5 output"
.format(ty))
dataset = f.create_dataset(name, (), ty_h5)
dataset[()] = data
class ResultDB:
def __init__(self):
self.rt = Notifier(dict())
self.nrt = dict()
def get(self, key):
try:
return self.nrt[key]
except KeyError:
return self.rt[key].read
def write_hdf5(self, f):
result_dict_to_hdf5(f, self.realtime_data.read)
result_dict_to_hdf5(f, self.data.read)
result_dict_to_hdf5(f, self.rt.read)
result_dict_to_hdf5(f, self.nrt)
def create_device(desc, dbh):
def create_device(desc, dmgr):
ty = desc["type"]
if ty == "local":
module = importlib.import_module(desc["module"])
device_class = getattr(module, desc["class"])
return device_class(dbh, **desc["arguments"])
return device_class(dmgr, **desc["arguments"])
elif ty == "controller":
if desc["best_effort"]:
cl = BestEffortClient
@ -81,30 +119,26 @@ def create_device(desc, dbh):
raise ValueError("Unsupported type in device DB: " + ty)
class DBHub:
"""Connects device, parameter and result databases to experiment.
Handle device driver creation and destruction.
"""
def __init__(self, ddb, pdb, rdb, read_only=False):
class DeviceManager:
"""Handles creation and destruction of local device drivers and controller
RPC clients."""
def __init__(self, ddb, virtual_devices=dict()):
self.ddb = ddb
self.virtual_devices = virtual_devices
self.active_devices = OrderedDict()
self.get_parameter = pdb.request
if not read_only:
self.set_parameter = pdb.set
self.add_rt_results = rdb.add_rt_results
self.get_result = rdb.request
self.set_result = rdb.set
def get_device(self, name):
def get(self, name):
"""Get the device driver or controller client corresponding to a
device database entry."""
if name in self.virtual_devices:
return self.virtual_devices[name]
if name in self.active_devices:
return self.active_devices[name]
else:
desc = self.ddb.request(name)
desc = self.ddb.get(name)
while isinstance(desc, str):
# alias
desc = self.ddb.request(desc)
desc = self.ddb.get(desc)
dev = create_device(desc, self)
self.active_devices[name] = dev
return dev

View File

@ -3,9 +3,8 @@ import time
from artiq.protocols import pyon
from artiq.tools import file_import
from artiq.master.worker_db import DBHub, ResultDB
from artiq.master.results import get_hdf5_output
from artiq.language.experiment import is_experiment
from artiq.master.worker_db import DeviceManager, ResultDB, get_hdf5_output
from artiq.language.environment import is_experiment
from artiq.language.core import set_watchdog_factory
@ -46,15 +45,14 @@ def make_parent_action(action, argnames, exception=ParentActionError):
class ParentDDB:
request = make_parent_action("req_device", "name", KeyError)
get = make_parent_action("get_device", "name", KeyError)
class ParentPDB:
request = make_parent_action("req_parameter", "name", KeyError)
get = make_parent_action("get_parameter", "name", KeyError)
set = make_parent_action("set_parameter", "name value")
init_rt_results = make_parent_action("init_rt_results", "description")
update_rt_results = make_parent_action("update_rt_results", "mod")
@ -82,7 +80,7 @@ class Scheduler:
"pipeline_name expid priority due_date flush"))
cancel = staticmethod(make_parent_action("scheduler_cancel", "rid"))
def __init__(self, pipeline_name, expid, priority):
def set_run_info(self, pipeline_name, expid, priority):
self.pipeline_name = pipeline_name
self.expid = expid
self.priority = priority
@ -110,8 +108,10 @@ def main():
exp = None
exp_inst = None
rdb = ResultDB(init_rt_results, update_rt_results)
dbh = DBHub(ParentDDB, ParentPDB, rdb)
dmgr = DeviceManager(ParentDDB,
virtual_devices={"scheduler": Scheduler()})
rdb = ResultDB()
rdb.rt.publish = update_rt_results
try:
while True:
@ -120,16 +120,12 @@ def main():
if action == "build":
start_time = time.localtime()
rid = obj["rid"]
pipeline_name = obj["pipeline_name"]
expid = obj["expid"]
priority = obj["priority"]
exp = get_exp(expid["file"], expid["experiment"])
exp_inst = exp(dbh,
scheduler=Scheduler(pipeline_name,
expid,
priority),
**expid["arguments"])
rdb.build()
dmgr.virtual_devices["scheduler"].set_run_info(
obj["pipeline_name"], expid, obj["priority"])
exp_inst = exp(dmgr, ParentPDB, rdb,
**expid["arguments"])
put_object({"action": "completed"})
elif action == "prepare":
exp_inst.prepare()
@ -150,7 +146,7 @@ def main():
elif action == "terminate":
break
finally:
dbh.close_devices()
dmgr.close_devices()
if __name__ == "__main__":
main()

View File

@ -20,7 +20,7 @@ class FlatFileDB:
def save(self):
pyon.store_file(self.filename, self.data.read)
def request(self, name):
def get(self, name):
return self.data.read[name]
def set(self, name, value):

View File

@ -1,13 +1,12 @@
from random import Random
from artiq.language.core import delay, kernel
from artiq.language.db import *
from artiq.language import units
from artiq.sim import time
class Core(AutoDB):
def build(self):
class Core:
def __init__(self, dmgr):
self.ref_period = 1
self._level = 0
@ -20,12 +19,11 @@ class Core(AutoDB):
return r
class Input(AutoDB):
class DBKeys:
core = Device()
name = Argument()
class Input:
def __init__(self, dmgr, name):
self.core = dmgr.get("core")
self.name = name
def build(self):
self.prng = Random()
@kernel
@ -42,10 +40,10 @@ class Input(AutoDB):
return result
class WaveOutput(AutoDB):
class DBKeys:
core = Device()
name = Argument()
class WaveOutput:
def __init__(self, dmgr, name):
self.core = dmgr.get("core")
self.name = name
@kernel
def pulse(self, frequency, duration):
@ -53,10 +51,10 @@ class WaveOutput(AutoDB):
delay(duration)
class VoltageOutput(AutoDB):
class DBKeys:
core = Device()
name = Argument()
class VoltageOutput:
def __init__(self, dmgr, name):
self.core = dmgr.get("core")
self.name = name
@kernel
def set(self, value):

View File

@ -6,11 +6,10 @@ from artiq.coredevice.runtime_exceptions import RTIOUnderflow
from artiq.coredevice import runtime_exceptions
class RTT(Experiment, AutoDB):
class DBKeys:
core = Device()
ttl_inout = Device()
rtt = Result()
class RTT(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_device("ttl_inout")
@kernel
def run(self):
@ -24,15 +23,15 @@ class RTT(Experiment, AutoDB):
delay(1*us)
t0 = now_mu()
self.ttl_inout.pulse(1*us)
self.rtt = mu_to_seconds(self.ttl_inout.timestamp() - t0)
self.set_result("rtt",
mu_to_seconds(self.ttl_inout.timestamp() - t0))
class Loopback(Experiment, AutoDB):
class DBKeys:
core = Device()
loop_in = Device()
loop_out = Device()
rtt = Result()
class Loopback(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_device("loop_in")
self.attr_device("loop_out")
@kernel
def run(self):
@ -44,15 +43,15 @@ class Loopback(Experiment, AutoDB):
delay(1*us)
t0 = now_mu()
self.loop_out.pulse(1*us)
self.rtt = mu_to_seconds(self.loop_in.timestamp() - t0)
self.set_result("rtt",
mu_to_seconds(self.loop_in.timestamp() - t0))
class ClockGeneratorLoopback(Experiment, AutoDB):
class DBKeys:
core = Device()
loop_clock_in = Device()
loop_clock_out = Device()
count = Result()
class ClockGeneratorLoopback(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_device("loop_clock_in")
self.attr_device("loop_clock_out")
@kernel
def run(self):
@ -64,14 +63,14 @@ class ClockGeneratorLoopback(Experiment, AutoDB):
with sequential:
delay(200*ns)
self.loop_clock_out.set(1*MHz)
self.count = self.loop_clock_in.count()
self.set_result("count",
self.loop_clock_in.count())
class PulseRate(Experiment, AutoDB):
class DBKeys:
core = Device()
loop_out = Device()
pulse_rate = Result()
class PulseRate(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_device("loop_out")
@kernel
def run(self):
@ -85,13 +84,14 @@ class PulseRate(Experiment, AutoDB):
dt += 1
self.core.break_realtime()
else:
self.pulse_rate = mu_to_seconds(2*dt)
self.set_result("pulse_rate",
mu_to_seconds(2*dt))
break
class Watchdog(Experiment, AutoDB):
class DBKeys:
core = Device()
class Watchdog(EnvExperiment):
def build(self):
self.attr_device("core")
@kernel
def run(self):
@ -100,14 +100,11 @@ class Watchdog(Experiment, AutoDB):
pass
class LoopbackCount(Experiment, AutoDB):
class DBKeys:
core = Device()
ttl_inout = Device()
npulses = Argument()
def report(self, n):
self.result = n
class LoopbackCount(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_device("ttl_inout")
self.attr_argument("npulses")
@kernel
def run(self):
@ -119,13 +116,14 @@ class LoopbackCount(Experiment, AutoDB):
for i in range(self.npulses):
delay(25*ns)
self.ttl_inout.pulse(25*ns)
self.report(self.ttl_inout.count())
self.set_result("count",
self.ttl_inout.count())
class Underflow(Experiment, AutoDB):
class DBKeys:
core = Device()
ttl_out = Device()
class Underflow(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_device("ttl_out")
@kernel
def run(self):
@ -134,10 +132,10 @@ class Underflow(Experiment, AutoDB):
self.ttl_out.pulse(25*ns)
class SequenceError(Experiment, AutoDB):
class DBKeys:
core = Device()
ttl_out = Device()
class SequenceError(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_device("ttl_out")
@kernel
def run(self):
@ -147,21 +145,18 @@ class SequenceError(Experiment, AutoDB):
self.ttl_out.pulse(25*us)
class TimeKeepsRunning(Experiment, AutoDB):
class DBKeys:
core = Device()
time_at_start = Result()
class TimeKeepsRunning(EnvExperiment):
def build(self):
self.attr_device("core")
@kernel
def run(self):
self.time_at_start = now_mu()
self.set_result("time_at_start", now_mu())
class Handover(Experiment, AutoDB):
class DBKeys:
core = Device()
t1 = Result()
t2 = Result()
class Handover(EnvExperiment):
def build(self):
self.attr_device("core")
@kernel
def get_now(self):
@ -169,34 +164,34 @@ class Handover(Experiment, AutoDB):
def run(self):
self.get_now()
self.t1 = self.time_at_start
self.set_result("t1", self.time_at_start)
self.get_now()
self.t2 = self.time_at_start
self.set_result("t2", self.time_at_start)
class CoredeviceTest(ExperimentCase):
def test_rtt(self):
self.execute(RTT)
rtt = self.dbh.get_result("rtt").read
rtt = self.rdb.get("rtt")
print(rtt)
self.assertGreater(rtt, 0*ns)
self.assertLess(rtt, 100*ns)
def test_loopback(self):
self.execute(Loopback)
rtt = self.dbh.get_result("rtt").read
rtt = self.rdb.get("rtt")
print(rtt)
self.assertGreater(rtt, 0*ns)
self.assertLess(rtt, 50*ns)
def test_clock_generator_loopback(self):
self.execute(ClockGeneratorLoopback)
count = self.dbh.get_result("count").read
count = self.rdb.get("count")
self.assertEqual(count, 10)
def test_pulse_rate(self):
self.execute(PulseRate)
rate = self.dbh.get_result("pulse_rate").read
rate = self.rdb.get("pulse_rate")
print(rate)
self.assertGreater(rate, 100*ns)
self.assertLess(rate, 2500*ns)
@ -204,7 +199,8 @@ class CoredeviceTest(ExperimentCase):
def test_loopback_count(self):
npulses = 2
r = self.execute(LoopbackCount, npulses=npulses)
self.assertEqual(r.result, npulses)
count = self.rdb.get("count")
self.assertEqual(count, npulses)
def test_underflow(self):
with self.assertRaises(runtime_exceptions.RTIOUnderflow):
@ -221,26 +217,23 @@ class CoredeviceTest(ExperimentCase):
def test_time_keeps_running(self):
self.execute(TimeKeepsRunning)
t1 = self.dbh.get_result("time_at_start").read
t1 = self.rdb.get("time_at_start")
self.execute(TimeKeepsRunning)
t2 = self.dbh.get_result("time_at_start").read
dead_time = mu_to_seconds(t2 - t1, self.dbh.get_device("core"))
t2 = self.rdb.get("time_at_start")
dead_time = mu_to_seconds(t2 - t1, self.dmgr.get("core"))
print(dead_time)
self.assertGreater(dead_time, 1*ms)
self.assertLess(dead_time, 300*ms)
def test_handover(self):
self.execute(Handover)
self.assertEqual(self.dbh.get_result("t1").read,
self.dbh.get_result("t2").read)
self.assertEqual(self.rdb.get("t1"), self.rdb.get("t2"))
class RPCTiming(Experiment, AutoDB):
class DBKeys:
core = Device()
repeats = Argument(100)
rpc_time_mean = Result()
rpc_time_stddev = Result()
class RPCTiming(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_argument("repeats", FreeValue(100))
def nop(self, x):
pass
@ -257,14 +250,14 @@ class RPCTiming(Experiment, AutoDB):
def run(self):
self.bench()
mean = sum(self.ts)/self.repeats
self.rpc_time_stddev = sqrt(
sum([(t - mean)**2 for t in self.ts])/self.repeats)*s
self.rpc_time_mean = mean*s
self.set_result("rpc_time_stddev", sqrt(
sum([(t - mean)**2 for t in self.ts])/self.repeats))
self.set_result("rpc_time_mean", mean)
class RPCTest(ExperimentCase):
def test_rpc_timing(self):
self.execute(RPCTiming)
self.assertGreater(self.dbh.get_result("rpc_time_mean").read, 100*ns)
self.assertLess(self.dbh.get_result("rpc_time_mean").read, 15*ms)
self.assertLess(self.dbh.get_result("rpc_time_stddev").read, 1*ms)
self.assertGreater(self.rdb.get("rpc_time_mean"), 100*ns)
self.assertLess(self.rdb.get("rpc_time_mean"), 15*ms)
self.assertLess(self.rdb.get("rpc_time_stddev"), 1*ms)

View File

@ -6,18 +6,19 @@ from artiq.sim import devices as sim_devices
from artiq.test.hardware_testbench import ExperimentCase
def _run_on_host(k_class, **parameters):
coredev = sim_devices.Core()
k_inst = k_class(core=coredev, **parameters)
def _run_on_host(k_class, **arguments):
dmgr = dict()
dmgr["core"] = sim_devices.Core(dmgr)
k_inst = k_class(dmgr, **arguments)
k_inst.run()
return k_inst
class _Primes(Experiment, AutoDB):
class DBKeys:
core = Device()
output_list = Argument()
maximum = Argument()
class _Primes(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_argument("output_list")
self.attr_argument("maximum")
@kernel
def run(self):
@ -33,11 +34,10 @@ class _Primes(Experiment, AutoDB):
self.output_list.append(x)
class _Misc(Experiment, AutoDB):
class DBKeys:
core = Device()
class _Misc(EnvExperiment):
def build(self):
self.attr_device("core")
self.input = 84
self.al = [1, 2, 3, 4, 5]
self.list_copy_in = [2*Hz, 10*MHz]
@ -52,11 +52,11 @@ class _Misc(Experiment, AutoDB):
self.list_copy_out = self.list_copy_in
class _PulseLogger(Experiment, AutoDB):
class DBKeys:
core = Device()
output_list = Argument()
name = Argument()
class _PulseLogger(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_argument("output_list")
self.attr_argument("name")
def _append(self, t, l, f):
if not hasattr(self, "first_timestamp"):
@ -79,14 +79,13 @@ class _PulseLogger(Experiment, AutoDB):
self.off(now_mu())
class _Pulses(Experiment, AutoDB):
class DBKeys:
core = Device()
output_list = Argument()
class _Pulses(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_argument("output_list")
for name in "a", "b", "c", "d":
pl = _PulseLogger(core=self.core,
pl = _PulseLogger(*self.dbs(),
output_list=self.output_list,
name=name)
setattr(self, name, pl)
@ -107,10 +106,10 @@ class _MyException(Exception):
pass
class _Exceptions(Experiment, AutoDB):
class DBKeys:
core = Device()
trace = Argument()
class _Exceptions(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_argument("trace")
@kernel
def run(self):
@ -151,12 +150,11 @@ class _Exceptions(Experiment, AutoDB):
self.trace.append(104)
class _RPCExceptions(Experiment, AutoDB):
class DBKeys:
core = Device()
catch = Argument(False)
class _RPCExceptions(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_argument("catch", FreeValue(False))
self.success = False
def exception_raiser(self):

View File

@ -5,9 +5,8 @@ import logging
from artiq.language import *
from artiq.protocols.file_db import FlatFileDB
from artiq.master.worker_db import DBHub, ResultDB
from artiq.frontend.artiq_run import (
DummyScheduler, DummyWatchdog, SimpleParamLogger)
from artiq.master.worker_db import DeviceManager, ResultDB
from artiq.frontend.artiq_run import DummyScheduler
artiq_root = os.getenv("ARTIQ_ROOT")
@ -33,9 +32,10 @@ def get_from_ddb(*path, default="skip"):
class ExperimentCase(unittest.TestCase):
def setUp(self):
self.ddb = FlatFileDB(os.path.join(artiq_root, "ddb.pyon"))
self.dmgr = DeviceManager(self.ddb,
virtual_devices={"scheduler": DummyScheduler()})
self.pdb = FlatFileDB(os.path.join(artiq_root, "pdb.pyon"))
self.rdb = ResultDB(lambda description: None, lambda mod: None)
self.dbh = DBHub(self.ddb, self.pdb, self.rdb)
self.rdb = ResultDB()
def execute(self, cls, **kwargs):
expid = {
@ -43,16 +43,16 @@ class ExperimentCase(unittest.TestCase):
"experiment": cls.__name__,
"arguments": kwargs
}
sched = DummyScheduler(expid)
self.dmgr.virtual_devices["scheduler"].expid = expid
try:
try:
exp = cls(self.dbh, scheduler=sched, **kwargs)
exp = cls(self.dmgr, self.pdb, self.rdb, **kwargs)
except KeyError as e:
# skip if ddb does not match requirements
raise unittest.SkipTest(*e.args)
self.rdb.build()
exp.prepare()
exp.run()
exp.analyze()
return exp
finally:
self.dbh.close_devices()
self.dmgr.close_devices()

View File

@ -7,12 +7,18 @@ from artiq import *
from artiq.master.scheduler import Scheduler
class EmptyExperiment(Experiment, AutoDB):
class EmptyExperiment(EnvExperiment):
def build(self):
pass
def run(self):
pass
class BackgroundExperiment(Experiment, AutoDB):
class BackgroundExperiment(EnvExperiment):
def build(self):
self.attr_device("scheduler")
def run(self):
while True:
self.scheduler.pause()

View File

@ -36,7 +36,9 @@ def run():
class OptimizeCase(unittest.TestCase):
def test_optimize(self):
coredev = core.Core(comm=comm_dummy.Comm(), ref_period=1*ns)
dmgr = dict()
dmgr["comm"] = comm_dummy.Comm(dmgr)
coredev = core.Core(dmgr, ref_period=1*ns)
func_def = ast.parse(optimize_in).body[0]
coredev.transform_stack(func_def, dict(), dict())
self.assertEqual(unparse(func_def), optimize_out)

View File

@ -7,20 +7,26 @@ from artiq import *
from artiq.master.worker import *
class WatchdogNoTimeout(Experiment, AutoDB):
class WatchdogNoTimeout(EnvExperiment):
def build(self):
pass
def run(self):
for i in range(10):
with watchdog(0.5*s):
sleep(0.1)
class WatchdogTimeout(Experiment, AutoDB):
class WatchdogTimeout(EnvExperiment):
def build(self):
pass
def run(self):
with watchdog(0.1*s):
sleep(100.0)
class WatchdogTimeoutInBuild(Experiment, AutoDB):
class WatchdogTimeoutInBuild(EnvExperiment):
def build(self):
with watchdog(0.1*s):
sleep(100.0)

View File

@ -7,7 +7,7 @@ import asyncio
import time
import os.path
from artiq.language.experiment import is_experiment
from artiq.language.environment import is_experiment
from artiq.protocols import pyon

View File

@ -118,7 +118,7 @@ pygments_style = 'sphinx'
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'classic'
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the

View File

@ -9,10 +9,10 @@ The most commonly used features from those modules can be imported with ``from a
.. automodule:: artiq.language.core
:members:
:mod:`artiq.language.db` module
-------------------------------
:mod:`artiq.language.environment` module
----------------------------------------
.. automodule:: artiq.language.db
.. automodule:: artiq.language.environment
:members:
:mod:`artiq.language.units` module

View File

@ -103,7 +103,7 @@ Run it as before, while the controller is running. You should see the message ap
$ ./hello_controller.py
message: Hello World!
When using the driver in an experiment, for simple cases the ``Client`` instance can be returned by the :class:`artiq.language.db.AutoDB` mechanism and used normally as a device.
When using the driver in an experiment, the ``Client`` instance can be returned by the environment mechanism (via the ``get_device`` and ``attr_device`` methods of :class:`artiq.language.environment.HasEnvironment`) and used normally as a device.
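For example, assuming the tutorial's ``hello`` controller is listed in the device database, it can be bound like any other device (the device name and method are illustrative)::

    class HelloExperiment(EnvExperiment):
        def build(self):
            self.attr_device("hello")           # RPC Client created from the controller's ddb entry

        def run(self):
            self.hello.message("Hello World!")  # remote call, same syntax as a local driver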
:warning: RPC servers operate on copies of objects provided by the client, and modifications to mutable types are not written back. For example, if the client passes a list as a parameter of an RPC method, and that method ``append()s`` an element to the list, the element is not appended to the client's list.

View File

@ -7,30 +7,19 @@ How do I ...
prevent my first RTIO command from causing an underflow?
--------------------------------------------------------
The RTIO timestamp counter starts counting at zero at the beginning of the first kernel run on the core device. The first RTIO event is programmed with a small timestamp above zero. If the kernel needs more time than this timestamp to produce the event, an underflow will occur. You can prevent it by calling ``break_realtime`` just before programming the first event.
override the `sysclk` frequency of just one DDS?
------------------------------------------------
Override the parameter using an argument in the DDB.
The first RTIO event is programmed with a small timestamp above the value of the timecounter at the start of the experiment. If the kernel needs more time than this timestamp to produce the event, an underflow will occur. You can prevent it by calling ``break_realtime`` just before programming the first event, or by adding a sufficient delay.
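A short sketch of the recommended pattern (``ttl0`` stands for any output channel)::

    @kernel
    def run(self):
        self.core.break_realtime()   # move the timeline cursor well past the current time
        self.ttl0.pulse(2*us)        # the first event now has enough slack to avoid an underflow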
organize parameters in folders?
-------------------------------
Use GUI auto-completion and filtering.
Names need to be unique.
Folders are not supported yet; use GUI filtering for now. Names need to be unique.
enforce functional dependencies between parameters?
---------------------------------------------------
If you want to override a parameter ``b`` in the PDB to be ``b = 2*a``,
use wrapper experiments, overriding parameters by passing them to the
experiment's constructor.
get rid of ``DBKeys``?
----------------------
``DBKeys`` references keys in PDB, DDB and RDB.
experiment's constructor (``param_override`` argument).
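An illustrative wrapper (``InnerExperiment`` and the parameter names are hypothetical) using the ``param_override`` argument of ``HasEnvironment``::

    class BOverride(EnvExperiment):
        def build(self):
            self.attr_parameter("a")
            # the inner experiment sees b == 2*a instead of the value stored in the PDB
            self.inner = InnerExperiment(*self.dbs(), param_override={"b": 2*self.a})

        def run(self):
            self.inner.run()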
write a generator feeding a kernel feeding an analyze function?
---------------------------------------------------------------
@ -94,4 +83,4 @@ The preferred way to specify a serial device is to make use of the ``hwgrep://``
URL: it allows selecting the serial device by its USB vendor ID, product
ID and/or serial number. Those never change, unlike the device file name.
See the :ref:`TDC001 documentation <tdc001-controller-usage-example>` for an example of ``hwgrep://`` usage.

View File

@ -11,23 +11,23 @@ As a very first step, we will turn on a LED on the core device. Create a file ``
from artiq import *
class LED(Experiment, AutoDB):
class DBKeys:
core = Device()
led = Device()
class LED(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_device("led")
@kernel
def run(self):
self.led.on()
The central part of our code is our ``LED`` class, which derives from :class:`artiq.language.db.AutoDB`. ``AutoDB`` is part of the mechanism that attaches device drivers and retrieves parameters according to a database. Our ``DBKeys`` class lists the devices (and parameters) that ``LED`` needs in order to operate, and the names of the attributes (e.g. ``led``) are used to search the database. ``AutoDB`` replaces them with the actual device drivers (and parameter values). Finally, the ``@kernel`` decorator tells the system that the ``run`` method must be executed on the core device (instead of the host).
The central part of our code is our ``LED`` class, which derives from :class:`artiq.language.environment.EnvExperiment`. Among other features, ``EnvExperiment`` calls our ``build`` method and provides the ``attr_device`` method that interfaces to the device database to create the appropriate device drivers and make those drivers accessible as ``self.core`` and ``self.led``. The ``@kernel`` decorator tells the system that the ``run`` method must be executed on the core device (instead of the host). The decorator uses ``self.core`` internally, which is why we request the core device using ``attr_device`` like any other.
Copy the files ``ddb.pyon`` and ``pdb.pyon`` (containing the device and parameter databases) from the ``examples`` folder of ARTIQ into the same directory as ``led.py`` (alternatively, you can use the ``-d`` and ``-p`` options of ``artiq_run.py``). You can open the database files using a text editor - their contents are in a human-readable format.
Run your code using ``artiq_run.py``, which is part of the ARTIQ front-end tools: ::
Run your code using ``artiq_run``, which is part of the ARTIQ front-end tools: ::
$ artiq_run.py led.py
$ artiq_run led.py
The LED of the device should turn on. Congratulations! You have a basic ARTIQ system up and running.
@ -41,10 +41,10 @@ Modify the code as follows: ::
def input_led_state():
return int(input("Enter desired LED state: "))
class LED(Experiment, AutoDB):
class DBKeys:
core = Device()
led = Device()
class LED(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_device("led")
@kernel
def run(self):
@ -58,9 +58,9 @@ Modify the code as follows: ::
You can then turn the LED off and on by entering 0 or 1 at the prompt that appears: ::
$ artiq_run.py led.py
$ artiq_run led.py
Enter desired LED state: 1
$ artiq_run.py led.py
$ artiq_run led.py
Enter desired LED state: 0
What happens is the ARTIQ compiler notices that the ``input_led_state`` function does not have a ``@kernel`` decorator and thus must be executed on the host. When the core device calls it, it sends a request to the host to execute it. The host displays the prompt, collects user input, and sends the result back to the core device, which sets the LED state accordingly.
@ -90,10 +90,10 @@ Create a new file ``rtio.py`` containing the following: ::
from artiq import *
class Tutorial(Experiment, AutoDB):
class DBKeys:
core = Device()
ttl0 = Device()
class Tutorial(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_device("ttl0")
@kernel
def run(self):
@ -113,10 +113,10 @@ Try reducing the period of the generated waveform until the CPU cannot keep up w
def print_underflow():
print("RTIO underflow occured")
class Tutorial(Experiment, AutoDB):
class DBKeys:
core = Device()
ttl0 = Device()
class Tutorial(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_device("ttl0")
@kernel
def run(self):

View File

@ -1,19 +1,19 @@
from artiq import *
class DDSTest(Experiment, AutoDB):
class DDSTest(EnvExperiment):
"""DDS test"""
class DBKeys:
core = Device()
dds_bus = Device()
dds0 = Device()
dds1 = Device()
dds2 = Device()
ttl0 = Device()
ttl1 = Device()
ttl2 = Device()
led = Device()
def build(self):
self.attr_device("core")
self.attr_device("dds_bus")
self.attr_device("dds0")
self.attr_device("dds1")
self.attr_device("dds2")
self.attr_device("ttl0")
self.attr_device("ttl1")
self.attr_device("ttl2")
self.attr_device("led")
@kernel
def run(self):

View File

@ -23,25 +23,21 @@ def model_numpy(xdata, F0):
return r
class FloppingF(Experiment, AutoDB):
class FloppingF(EnvExperiment):
"""Flopping F simulation"""
class DBKeys:
npoints = Argument(100)
min_freq = Argument(1000)
max_freq = Argument(2000)
def build(self):
self.attr_argument("npoints", FreeValue(100))
self.attr_argument("min_freq", FreeValue(1000))
self.attr_argument("max_freq", FreeValue(2000))
F0 = Argument(1500)
noise_amplitude = Argument(0.1)
self.attr_argument("F0", FreeValue(1500))
self.attr_argument("noise_amplitude", FreeValue(0.1))
frequency = Result()
brightness = Result()
self.frequency = self.set_result("flopping_f_frequency", [], True)
self.brightness = self.set_result("flopping_f_brightness", [], True)
flopping_freq = Parameter()
realtime_results = {
("frequency", "brightness"): "xy"
}
self.attr_device("scheduler")
def run(self):
for i in range(self.npoints):
@ -56,7 +52,7 @@ class FloppingF(Experiment, AutoDB):
def analyze(self):
popt, pcov = curve_fit(model_numpy,
self.frequency.read, self.brightness.read,
p0=[self.flopping_freq])
p0=[self.get_parameter("flopping_freq")])
perr = np.sqrt(np.diag(pcov))
if perr < 0.1:
self.flopping_freq = float(popt)
self.set_parameter("flopping_freq", float(popt))

View File

@ -1,10 +1,10 @@
from artiq import *
class Handover(Experiment, AutoDB):
class DBKeys:
core = Device()
led = Device()
class Handover(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_device("led")
@kernel
def blink_once(self):

View File

@ -3,11 +3,11 @@ import sys
from artiq import *
class Mandelbrot(Experiment, AutoDB):
class Mandelbrot(EnvExperiment):
"""Mandelbrot set demo"""
class DBKeys:
core = Device()
def build(self):
self.attr_device("core")
def col(self, i):
sys.stdout.write(" .,-:;i+hHM$*#@ "[i])

View File

@ -1,29 +1,24 @@
from artiq import *
class PhotonHistogram(Experiment, AutoDB):
class PhotonHistogram(EnvExperiment):
"""Photon histogram"""
class DBKeys:
core = Device()
dds_bus = Device()
bd_dds = Device()
bd_sw = Device()
bdd_dds = Device()
bdd_sw = Device()
pmt = Device()
def build(self):
self.attr_device("core")
self.attr_device("dds_bus")
self.attr_device("bd_dds")
self.attr_device("bd_sw")
self.attr_device("bdd_dds")
self.attr_device("bdd_sw")
self.attr_device("pmt")
nbins = Argument(100)
repeats = Argument(100)
self.attr_argument("nbins", FreeValue(100))
self.attr_argument("repeats", FreeValue(100))
cool_f = Parameter(230*MHz)
detect_f = Parameter(220*MHz)
detect_t = Parameter(100*us)
ion_present = Parameter(True)
hist = Result()
total = Result()
self.attr_parameter("cool_f", 230*MHz)
self.attr_parameter("detect_f", 220*MHz)
self.attr_parameter("detect_t", 100*us)
@kernel
def program_cooling(self):
@ -65,9 +60,8 @@ class PhotonHistogram(Experiment, AutoDB):
hist[n] += 1
total += n
self.hist = hist
self.total = total
self.ion_present = total > 5*self.repeats
self.set_result("cooling_photon_histogram", hist)
self.set_parameter("ion_present", total > 5*self.repeats)
if __name__ == "__main__":

View File

@ -10,23 +10,22 @@ transport_data = dict(
# 4 devices, 3 board each, 3 dacs each
)
class Transport(Experiment, AutoDB):
class Transport(EnvExperiment):
"""Transport"""
class DBKeys:
core = Device()
bd = Device()
bdd = Device()
pmt = Device()
electrodes = Device()
def build(self):
self.attr_device("core")
self.attr_device("bd")
self.attr_device("bdd")
self.attr_device("pmt")
self.attr_device("electrodes")
wait_at_stop = Parameter(100*us)
speed = Parameter(1.5)
self.attr_argument("wait_at_stop", FreeValue(100*us))
self.attr_argument("speed", FreeValue(1.5))
self.attr_argument("repeats", FreeValue(100))
self.attr_argument("nbins", FreeValue(100))
repeats = Argument(100)
nbins = Argument(100)
def prepare(self, stop):
def calc_waveforms(self, stop):
t = transport_data["t"][:stop]*self.speed
u = transport_data["u"][:stop]
@ -89,9 +88,9 @@ class Transport(Experiment, AutoDB):
def scan(self, stops):
for s in stops:
self.histogram = []
# non-kernel, calculate waveforms, build frames
# non-kernel, build frames
# could also be rpc'ed from repeat()
self.prepare(s)
self.calc_waveforms(s)
# kernel part
self.repeat()
# live update 2d plot with current self.histogram

View File

@ -1,20 +1,20 @@
from artiq import *
class AluminumSpectroscopy(Experiment, AutoDB):
class AluminumSpectroscopy(EnvExperiment):
"""Aluminum spectroscopy (simulation)"""
class DBKeys:
core = Device()
mains_sync = Device()
laser_cooling = Device()
spectroscopy = Device()
spectroscopy_b = Device()
state_detection = Device()
pmt = Device()
spectroscopy_freq = Parameter(432*MHz)
photon_limit_low = Argument(10)
photon_limit_high = Argument(15)
def build(self):
self.attr_device("core")
self.attr_device("mains_sync")
self.attr_device("laser_cooling")
self.attr_device("spectroscopy")
self.attr_device("spectroscopy_b")
self.attr_device("state_detection")
self.attr_device("pmt")
self.attr_parameter("spectroscopy_freq", 432*MHz)
self.attr_argument("photon_limit_low", FreeValue(10))
self.attr_argument("photon_limit_high", FreeValue(15))
@kernel
def run(self):

View File

@ -1,15 +1,13 @@
from artiq import *
class SimpleSimulation(Experiment, AutoDB):
class SimpleSimulation(EnvExperiment):
"""Simple simulation"""
class DBKeys:
core = Device()
a = Device()
b = Device()
c = Device()
d = Device()
def build(self):
self.attr_device("core")
for wo in "abcd":
self.attr_device(wo)
@kernel
def run(self):
@ -23,16 +21,13 @@ class SimpleSimulation(Experiment, AutoDB):
def main():
from artiq.sim import devices as sd
from artiq.sim import devices
core = sd.Core()
exp = SimpleSimulation(
core=core,
a=sd.WaveOutput(core=core, name="a"),
b=sd.WaveOutput(core=core, name="b"),
c=sd.WaveOutput(core=core, name="c"),
d=sd.WaveOutput(core=core, name="d"),
)
dmgr = dict()
dmgr["core"] = devices.Core(dmgr)
for wo in "abcd":
dmgr[wo] = devices.WaveOutput(dmgr, wo)
exp = SimpleSimulation(dmgr)
exp.run()
if __name__ == "__main__":