Merge branch 'master' into new-py2llvm

whitequark 2015-10-13 14:57:19 +03:00
commit bd5b324fc2
96 changed files with 2282 additions and 1167 deletions

.gitignore
View File

@ -12,6 +12,7 @@ doc/manual/_build
/dist
/*.egg-info
/.coverage
artiq/test/results
examples/master/results
Output/
/lit-test/libartiq_support/libartiq_support.so

View File

@ -1,33 +1,39 @@
language: python
python:
- '3.4'
- '3.5'
branches:
only:
- master
sudo: false
env:
global:
- BUILD_SOC=1
- secure: "DUk/Ihg8KbbzEgPF0qrHqlxU8e8eET9i/BtzNvFddIGX4HP/P2qz0nk3cVkmjuWhqJXSbC22RdKME9qqPzw6fJwJ6dpJ3OR6dDmSd7rewavq+niwxu52PVa+yK8mL4yf1terM7QQ5tIRf+yUL9qGKrZ2xyvEuRit6d4cFep43Ws="
matrix:
- BUILD_SOC=0
- BUILD_SOC=1
before_install:
- mkdir -p $HOME/.mlabs
- if [ $TRAVIS_PULL_REQUEST != false ]; then BUILD_SOC=0; fi
- if [ $BUILD_SOC -ne 0 ]; then ./.travis/get-xilinx.sh; fi
- . ./.travis/get-toolchain.sh
- . ./.travis/get-anaconda.sh
- source $HOME/miniconda/bin/activate py34
- echo "BUILD_SOC=$BUILD_SOC" >> $HOME/.mlabs/build_settings.sh
- source $HOME/miniconda/bin/activate py35
- conda install -q pip coverage anaconda-client migen cython
- pip install coveralls
install:
- conda build conda/artiq
- conda build --python 3.5 conda/artiq
- conda install -q artiq --use-local
script:
- coverage run --source=artiq setup.py test
- make -C doc/manual html
after_success:
- anaconda -q login --hostname $(hostname) --username $binstar_login --password $binstar_password
- if [ "$TRAVIS_BRANCH" == "master" ]; then anaconda -q upload --user $binstar_login --channel dev --force $HOME/miniconda/conda-bld/linux-64/artiq-*.tar.bz2; fi
- anaconda -q logout
- |
if [ "$TRAVIS_BRANCH" == "master" -a $BUILD_SOC -eq 1 ]; then
anaconda -q login --hostname $(hostname) --username $binstar_login --password $binstar_password
anaconda -q upload --user $binstar_login --channel dev --force $HOME/miniconda/conda-bld/linux-64/artiq-*.tar.bz2
anaconda -q logout
fi
- coveralls
notifications:
email:

View File

@ -9,5 +9,5 @@ conda config --set always_yes yes --set changeps1 no
conda update -q conda
conda info -a
conda install conda-build jinja2
conda create -q -n py34 python=$TRAVIS_PYTHON_VERSION
conda create -q -n py35 python=$TRAVIS_PYTHON_VERSION
conda config --add channels https://conda.anaconda.org/m-labs/channel/dev

View File

@ -19,11 +19,10 @@ do
done
export PATH=$PWD/packages/usr/local/llvm-or1k/bin:$PWD/packages/usr/local/bin:$PWD/packages/usr/bin:$PATH
export LD_LIBRARY_PATH=$PWD/packages/usr/lib/x86_64-linux-gnu:$PWD/packages/usr/local/x86_64-unknown-linux-gnu/or1k-elf/lib:$LD_LIBRARY_PATH
export LD_LIBRARY_PATH=$PWD/packages/usr/local/llvm-or1k/lib:$PWD/packages/usr/lib/x86_64-linux-gnu:$PWD/packages/usr/local/x86_64-unknown-linux-gnu/or1k-elf/lib:$LD_LIBRARY_PATH
echo "export LD_LIBRARY_PATH=$PWD/packages/usr/lib/x86_64-linux-gnu:$PWD/packages/usr/local/x86_64-unknown-linux-gnu/or1k-elf/lib:\$LD_LIBRARY_PATH" >> $HOME/.mlabs/build_settings.sh
echo "export PATH=$PWD/packages/usr/local/llvm-or1k/bin:$PWD/packages/usr/local/bin:$PWD/packages/usr/bin:\$PATH" >> $HOME/.mlabs/build_settings.sh
echo "export LD_LIBRARY_PATH=$PWD/packages/usr/local/llvm-or1k/lib:$PWD/packages/usr/lib/x86_64-linux-gnu:$PWD/packages/usr/local/x86_64-unknown-linux-gnu/or1k-elf/lib:\$LD_LIBRARY_PATH" >> $HOME/.mlabs/build_settings.sh
or1k-linux-as --version
llc --version
clang --version

View File

@ -30,7 +30,7 @@ git clone https://github.com/fallen/impersonate_macaddress
make -C impersonate_macaddress
# Tell mibuild where Xilinx toolchains are installed
# and feed it the mac address corresponding to the license
cat > $HOME/.mlabs/build_settings.sh << EOF
cat >> $HOME/.mlabs/build_settings.sh << EOF
MISOC_EXTRA_VIVADO_CMDLINE="-Ob vivado_path $HOME/Xilinx/Vivado"
MISOC_EXTRA_ISE_CMDLINE="-Ob ise_path $HOME/opt/Xilinx/"
export MACADDR=$macaddress

View File

@ -1,6 +1,6 @@
import sys, os
from artiq.protocols.file_db import FlatFileDB
from artiq.master.databases import DeviceDB
from artiq.master.worker_db import DeviceManager
from artiq.coredevice.core import Core, CompileError
@ -17,10 +17,10 @@ def main():
testcase_vars = {'__name__': 'testbench'}
exec(testcase_code, testcase_vars)
ddb_path = os.path.join(os.path.dirname(sys.argv[1]), "ddb.pyon")
ddb_path = os.path.join(os.path.dirname(sys.argv[1]), "device_db.pyon")
try:
core = Core(dmgr=DeviceManager(FlatFileDB(ddb_path)))
core = Core(dmgr=DeviceManager(DeviceDB(ddb_path)))
if compile_only:
core.compile(testcase_vars["entrypoint"], (), {})
else:

View File

@ -34,10 +34,22 @@ def rtio_get_counter() -> TInt64:
raise NotImplementedError("syscall not simulated")
class Core:
def __init__(self, dmgr, ref_period=8*ns, external_clock=False):
self.comm = dmgr.get("comm")
"""Core device driver.
:param ref_period: period of the reference clock for the RTIO subsystem.
On platforms that use clock multiplication and SERDES-based PHYs,
this is the period after multiplication. For example, with an RTIO core
clocked at 125 MHz and a SERDES multiplication factor of 8, the
reference period is 1 ns.
The time machine unit is equal to this period.
:param external_clock: whether the core device should switch to its
external RTIO clock input instead of using its internal oscillator.
:param comm_device: name of the device used for communications.
"""
def __init__(self, dmgr, ref_period=8*ns, external_clock=False, comm_device="comm"):
self.ref_period = ref_period
self.external_clock = external_clock
self.comm = dmgr.get(comm_device)
self.first_run = True
self.core = self
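
The new comm_device argument makes the communication device selectable per
core entry. A hypothetical device_db.pyon fragment (the entry names and
values are illustrative, not taken from this commit):

    "core": {
        "type": "local",
        "module": "artiq.coredevice.core",
        "class": "Core",
        "arguments": {"ref_period": 1e-9, "comm_device": "comm"}
    }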
@ -80,6 +92,8 @@ class Core:
@kernel
def break_realtime(self):
"""Set the timeline to the current value of the hardware RTIO counter
plus a margin of 125000 machine units."""
min_now = rtio_get_counter() + 125000
if now_mu() < min_now:
at_mu(min_now)
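
A minimal kernel sketch using break_realtime (the ttl0 output device is an
assumption):

    @kernel
    def run(self):
        self.core.break_realtime()  # timeline >= counter + 125000 mu
        self.ttl0.pulse(100*us)     # enough slack to avoid an underflow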

View File

@ -137,16 +137,17 @@ class Novatech409B:
else:
self._ser_send("I a")
def do_simultaneous_update(self):
"""Apply update in simultaneous update mode."""
self._ser_send("I p")
def set_freq(self, ch_no, freq):
"""Set frequency of one channel."""
self.set_simultaneous_update(False)
# Novatech expects MHz
self._ser_send("F{:d} {:f}".format(ch_no, freq/1e6))
def set_phase(self, ch_no, phase):
"""Set phase of one channel."""
# do this immediately, disable SimultaneousUpdate mode
self.set_simultaneous_update(False)
# phase word is required by device
# N is an integer from 0 to 16383. The phase is set to
# N*360/16384 deg; ARTIQ represents phase in cycles [0, 1].
@ -154,33 +155,6 @@ class Novatech409B:
cmd = "P{:d} {:d}".format(ch_no, phase_word)
self._ser_send(cmd)
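
A sketch of the cycles-to-phase-word conversion the comment describes (not
verbatim from this file):

    # phase in cycles [0, 1] -> 14-bit phase word
    phase_word = round(phase * 16384) % 16384
    cmd = "P{:d} {:d}".format(ch_no, phase_word)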
def set_freq_all_phase_continuous(self, freq):
"""Set frequency of all channels simultaneously.
Set frequency of all channels simultaneously.
1) all DDSs are set to phase continuous mode
2) all DDSs are simultaneously set to new frequency
Together 1 and 2 ensure phase continuous frequency switching.
"""
self.set_simultaneous_update(True)
self.set_phase_continuous(True)
for i in range(4):
self.set_freq(i, freq)
# send command necessary to update all channels at the same time
self._ser_send("I p")
def set_phase_all(self, phase):
"""Set phase of all channels simultaneously."""
self.set_simultaneous_update(True)
# Note that this only works if the continuous
# phase switching is turned off.
self.set_phase_continuous(False)
for i in range(4):
self.set_phase(i, phase)
# send command necessary to update all channels at the same time
self._ser_send("I p")
def set_gain(self, ch_no, volts):
"""Set amplitude of one channel."""
@ -191,7 +165,6 @@ class Novatech409B:
s = "Amplitude out of range {v}".format(v=volts)
raise ValueError(s)
self.set_simultaneous_update(False)
s = "V{:d} {:d}".format(ch_no, dac_value)
self._ser_send(s)

View File

@ -12,6 +12,7 @@ from prettytable import PrettyTable
from artiq.protocols.pc_rpc import Client
from artiq.protocols.sync_struct import Subscriber
from artiq.protocols import pyon
from artiq.tools import short_format
def clear_screen():
@ -58,40 +59,37 @@ def get_argparser():
parser_delete = subparsers.add_parser("delete",
help="delete an experiment "
"from the schedule")
parser_delete.add_argument("-g", action="store_true",
help="request graceful termination")
parser_delete.add_argument("rid", type=int,
help="run identifier (RID)")
parser_set_device = subparsers.add_parser(
"set-device", help="add or modify a device")
parser_set_device.add_argument("name", help="name of the device")
parser_set_device.add_argument("description",
help="description in PYON format")
parser_set_dataset = subparsers.add_parser(
"set-dataset", help="add or modify a dataset")
parser_set_dataset.add_argument("name", help="name of the dataset")
parser_set_dataset.add_argument("value",
help="value in PYON format")
parser_set_dataset.add_argument("-p", "--persist", action="store_true",
help="make the dataset persistent")
parser_del_device = subparsers.add_parser(
"del-device", help="delete a device")
parser_del_device.add_argument("name", help="name of the device")
parser_set_parameter = subparsers.add_parser(
"set-parameter", help="add or modify a parameter")
parser_set_parameter.add_argument("name", help="name of the parameter")
parser_set_parameter.add_argument("value",
help="value in PYON format")
parser_del_parameter = subparsers.add_parser(
"del-parameter", help="delete a parameter")
parser_del_parameter.add_argument("name", help="name of the parameter")
parser_del_dataset = subparsers.add_parser(
"del-dataset", help="delete a dataset")
parser_del_dataset.add_argument("name", help="name of the dataset")
parser_show = subparsers.add_parser(
"show", help="show schedule, log, devices or parameters")
"show", help="show schedule, log, devices or datasets")
parser_show.add_argument(
"what",
help="select object to show: schedule/log/devices/parameters")
help="select object to show: schedule/log/devices/datasets")
parser_scan = subparsers.add_parser("scan-repository",
help="trigger a repository (re)scan")
parser_scan.add_argument("revision", default=None, nargs="?",
help="use a specific repository revision "
"(defaults to head)")
subparsers.add_parser(
"scan-devices", help="trigger a device database (re)scan")
parser_scan_repos = subparsers.add_parser(
"scan-repository", help="trigger a repository (re)scan")
parser_scan_repos.add_argument("revision", default=None, nargs="?",
help="use a specific repository revision "
"(defaults to head)")
return parser
@ -128,23 +126,22 @@ def _action_submit(remote, args):
def _action_delete(remote, args):
remote.delete(args.rid)
if args.g:
remote.request_termination(args.rid)
else:
remote.delete(args.rid)
def _action_set_device(remote, args):
remote.set(args.name, pyon.decode(args.description))
def _action_set_dataset(remote, args):
remote.set(args.name, pyon.decode(args.value), args.persist)
def _action_del_device(remote, args):
def _action_del_dataset(remote, args):
remote.delete(args.name)
def _action_set_parameter(remote, args):
remote.set(args.name, pyon.decode(args.value))
def _action_del_parameter(remote, args):
remote.delete(args.name)
def _action_scan_devices(remote, args):
remote.scan()
def _action_scan_repository(remote, args):
@ -192,11 +189,11 @@ def _show_devices(devices):
print(table)
def _show_parameters(parameters):
def _show_datasets(datasets):
clear_screen()
table = PrettyTable(["Parameter", "Value"])
for k, v in sorted(parameters.items(), key=itemgetter(0)):
table.add_row([k, str(v)])
table = PrettyTable(["Dataset", "Persistent", "Value"])
for k, (persist, value) in sorted(datasets.items(), key=itemgetter(0)):
table.add_row([k, "Y" if persist else "N", short_format(value)])
print(table)
@ -265,8 +262,8 @@ def main():
_show_log(args)
elif args.what == "devices":
_show_dict(args, "devices", _show_devices)
elif args.what == "parameters":
_show_dict(args, "parameters", _show_parameters)
elif args.what == "datasets":
_show_dict(args, "datasets", _show_datasets)
else:
print("Unknown object to show, use -h to list valid names.")
sys.exit(1)
@ -275,10 +272,9 @@ def main():
target_name = {
"submit": "master_schedule",
"delete": "master_schedule",
"set_device": "master_ddb",
"del_device": "master_ddb",
"set_parameter": "master_pdb",
"del_parameter": "master_pdb",
"set_dataset": "master_dataset_db",
"del_dataset": "master_dataset_db",
"scan_devices": "master_device_db",
"scan_repository": "master_repository"
}[action]
remote = Client(args.server, port, target_name)

View File

@ -2,8 +2,8 @@
import sys, logging, argparse
from artiq.protocols.file_db import FlatFileDB
from artiq.master.worker_db import DeviceManager
from artiq.master.databases import DeviceDB, DatasetDB
from artiq.master.worker_db import DeviceManager, DatasetManager
from artiq.coredevice.core import CompileError
from artiq.tools import *
@ -15,10 +15,10 @@ def get_argparser():
parser = argparse.ArgumentParser(description="ARTIQ static compiler")
verbosity_args(parser)
parser.add_argument("-d", "--ddb", default="ddb.pyon",
help="device database file")
parser.add_argument("-p", "--pdb", default="pdb.pyon",
help="parameter database file")
parser.add_argument("--device-db", default="device_db.pyon",
help="device database file (default: '%(default)s')")
parser.add_argument("--dataset-db", default="dataset_db.pyon",
help="dataset file (default: '%(default)s')")
parser.add_argument("-e", "--experiment", default=None,
help="experiment to compile")
@ -36,14 +36,14 @@ def main():
args = get_argparser().parse_args()
init_logger(args)
dmgr = DeviceManager(FlatFileDB(args.ddb))
pdb = FlatFileDB(args.pdb)
device_mgr = DeviceManager(DeviceDB(args.device_db))
dataset_mgr = DatasetManager(DatasetDB(args.dataset_db))
try:
module = file_import(args.file)
exp = get_experiment(module, args.experiment)
arguments = parse_arguments(args.arguments)
exp_inst = exp(dmgr, pdb, **arguments)
exp_inst = exp(device_mgr, dataset_mgr, **arguments)
if not hasattr(exp.run, "artiq_embedded"):
raise ValueError("Experiment entry point must be a kernel")
@ -57,7 +57,7 @@ def main():
print(error.render_string(colored=True), file=sys.stderr)
return
finally:
dmgr.close_devices()
device_mgr.close_devices()
if object_map.has_rpc():
raise ValueError("Experiment must not use RPC")

View File

@ -2,8 +2,8 @@
import argparse
from artiq.master.databases import DeviceDB
from artiq.master.worker_db import DeviceManager
from artiq.protocols.file_db import FlatFileDB
def to_bytes(string):
@ -13,8 +13,8 @@ def to_bytes(string):
def get_argparser():
parser = argparse.ArgumentParser(description="ARTIQ core device "
"remote access tool")
parser.add_argument("-d", "--ddb", default="ddb.pyon",
help="device database file")
parser.add_argument("--device-db", default="device_db.pyon",
help="device database file (default: '%(default)s')")
subparsers = parser.add_subparsers(dest="action")
subparsers.required = True
@ -58,9 +58,9 @@ def get_argparser():
def main():
args = get_argparser().parse_args()
dmgr = DeviceManager(FlatFileDB(args.ddb))
device_mgr = DeviceManager(DeviceDB(args.device_db))
try:
comm = dmgr.get("comm")
comm = device_mgr.get("comm")
comm.check_ident()
if args.action == "log":
@ -83,7 +83,7 @@ def main():
elif args.action == "cfg-erase":
comm.flash_storage_erase()
finally:
dmgr.close_devices()
device_mgr.close_devices()
if __name__ == "__main__":
main()

View File

@ -11,7 +11,7 @@ import socket
from artiq.protocols.sync_struct import Subscriber
from artiq.protocols.pc_rpc import AsyncioClient, Server
from artiq.tools import verbosity_args, init_logger
from artiq.tools import TaskObject, asyncio_process_wait_timeout, Condition
from artiq.tools import TaskObject, Condition
logger = logging.getLogger(__name__)
@ -56,60 +56,55 @@ class Controller:
self.process = None
self.launch_task = asyncio.Task(self.launcher())
@asyncio.coroutine
def end(self):
async def end(self):
self.launch_task.cancel()
yield from asyncio.wait_for(self.launch_task, None)
await asyncio.wait_for(self.launch_task, None)
@asyncio.coroutine
def _call_controller(self, method):
async def _call_controller(self, method):
remote = AsyncioClient()
yield from remote.connect_rpc(self.host, self.port, None)
await remote.connect_rpc(self.host, self.port, None)
try:
targets, _ = remote.get_rpc_id()
remote.select_rpc_target(targets[0])
r = yield from getattr(remote, method)()
r = await getattr(remote, method)()
finally:
remote.close_rpc()
return r
@asyncio.coroutine
def _ping(self):
async def _ping(self):
try:
ok = yield from asyncio.wait_for(self._call_controller("ping"),
self.ping_timeout)
ok = await asyncio.wait_for(self._call_controller("ping"),
self.ping_timeout)
if ok:
self.retry_timer_cur = self.retry_timer
return ok
except:
return False
@asyncio.coroutine
def _wait_and_ping(self):
async def _wait_and_ping(self):
while True:
try:
yield from asyncio_process_wait_timeout(self.process,
self.ping_timer)
await asyncio.wait_for(self.process.wait(),
self.ping_timer)
except asyncio.TimeoutError:
logger.debug("pinging controller %s", self.name)
ok = yield from self._ping()
ok = await self._ping()
if not ok:
logger.warning("Controller %s ping failed", self.name)
yield from self._terminate()
await self._terminate()
return
else:
break
@asyncio.coroutine
def launcher(self):
async def launcher(self):
try:
while True:
logger.info("Starting controller %s with command: %s",
self.name, self.command)
try:
self.process = yield from asyncio.create_subprocess_exec(
self.process = await asyncio.create_subprocess_exec(
*shlex.split(self.command))
yield from self._wait_and_ping()
await self._wait_and_ping()
except FileNotFoundError:
logger.warning("Controller %s failed to start", self.name)
else:
@ -117,33 +112,32 @@ class Controller:
logger.warning("Restarting in %.1f seconds",
self.retry_timer_cur)
try:
yield from asyncio.wait_for(self.retry_now.wait(),
self.retry_timer_cur)
await asyncio.wait_for(self.retry_now.wait(),
self.retry_timer_cur)
except asyncio.TimeoutError:
pass
self.retry_timer_cur *= self.retry_timer_backoff
except asyncio.CancelledError:
yield from self._terminate()
await self._terminate()
@asyncio.coroutine
def _terminate(self):
async def _terminate(self):
logger.info("Terminating controller %s", self.name)
if self.process is not None and self.process.returncode is None:
try:
yield from asyncio.wait_for(self._call_controller("terminate"),
self.term_timeout)
await asyncio.wait_for(self._call_controller("terminate"),
self.term_timeout)
except:
logger.warning("Controller %s did not respond to terminate "
"command, killing", self.name)
self.process.kill()
try:
yield from asyncio_process_wait_timeout(self.process,
self.term_timeout)
await asyncio.wait_for(self.process.wait(),
self.term_timeout)
except:
logger.warning("Controller %s failed to exit, killing",
self.name)
self.process.kill()
yield from self.process.wait()
await self.process.wait()
logger.debug("Controller %s terminated", self.name)
@ -163,17 +157,16 @@ class Controllers:
self.active = dict()
self.process_task = asyncio.Task(self._process())
@asyncio.coroutine
def _process(self):
async def _process(self):
while True:
action, param = yield from self.queue.get()
action, param = await self.queue.get()
if action == "set":
k, ddb_entry = param
if k in self.active:
yield from self.active[k].end()
await self.active[k].end()
self.active[k] = Controller(k, ddb_entry)
elif action == "del":
yield from self.active[param].end()
await self.active[param].end()
del self.active[param]
else:
raise ValueError
@ -196,11 +189,10 @@ class Controllers:
for name in set(self.active_or_queued):
del self[name]
@asyncio.coroutine
def shutdown(self):
async def shutdown(self):
self.process_task.cancel()
for c in self.active.values():
yield from c.end()
await c.end()
class ControllerDB:
@ -225,8 +217,7 @@ class ControllerManager(TaskObject):
self.retry_master = retry_master
self.controller_db = ControllerDB()
@asyncio.coroutine
def _do(self):
async def _do(self):
try:
subscriber = Subscriber("devices",
self.controller_db.sync_struct_init)
@ -236,12 +227,12 @@ class ControllerManager(TaskObject):
s = subscriber.writer.get_extra_info("socket")
localhost = s.getsockname()[0]
self.controller_db.set_host_filter(localhost)
yield from subscriber.connect(self.server, self.port,
set_host_filter)
await subscriber.connect(self.server, self.port,
set_host_filter)
try:
yield from asyncio.wait_for(subscriber.receive_task, None)
await asyncio.wait_for(subscriber.receive_task, None)
finally:
yield from subscriber.close()
await subscriber.close()
except (ConnectionAbortedError, ConnectionError,
ConnectionRefusedError, ConnectionResetError) as e:
logger.warning("Connection to master failed (%s: %s)",
@ -249,11 +240,11 @@ class ControllerManager(TaskObject):
else:
logger.warning("Connection to master lost")
logger.warning("Retrying in %.1f seconds", self.retry_master)
yield from asyncio.sleep(self.retry_master)
await asyncio.sleep(self.retry_master)
except asyncio.CancelledError:
pass
finally:
yield from self.controller_db.current_controllers.shutdown()
await self.controller_db.current_controllers.shutdown()
def retry_now(self, k):
"""If a controller is disabled and pending retry, perform that retry

View File

@ -15,8 +15,7 @@ from artiq.protocols.pc_rpc import AsyncioClient
from artiq.gui.state import StateManager
from artiq.gui.explorer import ExplorerDock
from artiq.gui.moninj import MonInj
from artiq.gui.results import ResultsDock
from artiq.gui.parameters import ParametersDock
from artiq.gui.datasets import DatasetsDock
from artiq.gui.schedule import ScheduleDock
from artiq.gui.log import LogDock
from artiq.gui.console import ConsoleDock
@ -92,30 +91,24 @@ def main():
args.server, args.port_notify))
atexit.register(lambda: loop.run_until_complete(d_explorer.sub_close()))
d_results = ResultsDock(win, area)
smgr.register(d_results)
loop.run_until_complete(d_results.sub_connect(
d_datasets = DatasetsDock(win, area)
smgr.register(d_datasets)
loop.run_until_complete(d_datasets.sub_connect(
args.server, args.port_notify))
atexit.register(lambda: loop.run_until_complete(d_results.sub_close()))
atexit.register(lambda: loop.run_until_complete(d_datasets.sub_close()))
if os.name != "nt":
d_ttl_dds = MonInj()
loop.run_until_complete(d_ttl_dds.start(args.server, args.port_notify))
atexit.register(lambda: loop.run_until_complete(d_ttl_dds.stop()))
d_params = ParametersDock()
loop.run_until_complete(d_params.sub_connect(
args.server, args.port_notify))
atexit.register(lambda: loop.run_until_complete(d_params.sub_close()))
if os.name != "nt":
area.addDock(d_ttl_dds.dds_dock, "top")
area.addDock(d_ttl_dds.ttl_dock, "above", d_ttl_dds.dds_dock)
area.addDock(d_results, "above", d_ttl_dds.ttl_dock)
area.addDock(d_datasets, "above", d_ttl_dds.ttl_dock)
else:
area.addDock(d_results, "top")
area.addDock(d_params, "above", d_results)
area.addDock(d_explorer, "above", d_params)
area.addDock(d_datasets, "top")
area.addDock(d_explorer, "above", d_datasets)
d_schedule = ScheduleDock(status_bar, schedule_ctl)
loop.run_until_complete(d_schedule.sub_connect(
@ -127,16 +120,18 @@ def main():
args.server, args.port_notify))
atexit.register(lambda: loop.run_until_complete(d_log.sub_close()))
pdb = AsyncioClient()
loop.run_until_complete(pdb.connect_rpc(
args.server, args.port_control, "master_pdb"))
atexit.register(lambda: pdb.close_rpc())
def _get_parameter(k, v):
asyncio.async(pdb.set(k, v))
dataset_db = AsyncioClient()
loop.run_until_complete(dataset_db.connect_rpc(
args.server, args.port_control, "master_dataset_db"))
atexit.register(lambda: dataset_db.close_rpc())
def _set_dataset(k, v):
asyncio.ensure_future(dataset_db.set(k, v))
def _del_dataset(k):
asyncio.ensure_future(dataset_db.delete(k))
d_console = ConsoleDock(
d_params.get_parameter,
_get_parameter,
d_results.get_result)
d_datasets.get_dataset,
_set_dataset,
_del_dataset)
area.addDock(d_console, "bottom")
area.addDock(d_log, "above", d_console)

View File

@ -93,27 +93,26 @@ class DBWriter(TaskObject):
try:
self._queue.put_nowait((k, v))
except asyncio.QueueFull:
logger.warning("failed to update parameter '%s': "
logger.warning("failed to update dataset '%s': "
"too many pending updates", k)
@asyncio.coroutine
def _do(self):
async def _do(self):
while True:
k, v = yield from self._queue.get()
k, v = await self._queue.get()
url = self.base_url + "/write"
params = {"u": self.user, "p": self.password, "db": self.database,
"consistency": "any", "precision": "n"}
fmt_ty, fmt_v = format_influxdb(v)
data = "{},parameter={} {}={}".format(self.table, k, fmt_ty, fmt_v)
data = "{},dataset={} {}={}".format(self.table, k, fmt_ty, fmt_v)
try:
response = yield from aiohttp.request(
response = await aiohttp.request(
"POST", url, params=params, data=data)
except:
logger.warning("got exception trying to update '%s'",
k, exc_info=True)
else:
if response.status not in (200, 204):
content = (yield from response.content.read()).decode()
content = (await response.content.read()).decode()
if content:
content = content[:-1] # drop \n
logger.warning("got HTTP status %d "
@ -122,15 +121,31 @@ class DBWriter(TaskObject):
response.close()
class Parameters:
class _Mock:
def __setitem__(self, k, v):
pass
def __getitem__(self, k):
return self
def __delitem__(self, k):
pass
class Datasets:
def __init__(self, filter_function, writer, init):
self.filter_function = filter_function
self.writer = writer
def __setitem__(self, k, v):
if self.filter_function(k):
self.writer.update(k, v)
self.writer.update(k, v[1])
# ignore mutations
def __getitem__(self, k):
return _Mock()
# ignore deletions
def __delitem__(self, k):
pass
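
The _Mock sink turns mutations and deletions of nested dataset values into
no-ops instead of errors; only top-level assignments reach InfluxDB, with
v[1] extracting the value from the new (persist, value) tuples. A quick
illustration:

    sink = _Mock()
    sink["spectrum"][3] = 42  # no-op: __getitem__ returns the mock itself
    del sink["spectrum"]      # no-op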
@ -144,18 +159,17 @@ class MasterReader(TaskObject):
self.filter_function = filter_function
self.writer = writer
@asyncio.coroutine
def _do(self):
async def _do(self):
subscriber = Subscriber(
"parameters",
partial(Parameters, self.filter_function, self.writer))
"datasets",
partial(Datasets, self.filter_function, self.writer))
while True:
try:
yield from subscriber.connect(self.server, self.port)
await subscriber.connect(self.server, self.port)
try:
yield from asyncio.wait_for(subscriber.receive_task, None)
await asyncio.wait_for(subscriber.receive_task, None)
finally:
yield from subscriber.close()
await subscriber.close()
except (ConnectionAbortedError, ConnectionError,
ConnectionRefusedError, ConnectionResetError) as e:
logger.warning("Connection to master failed (%s: %s)",
@ -163,7 +177,7 @@ class MasterReader(TaskObject):
else:
logger.warning("Connection to master lost")
logger.warning("Retrying in %.1f seconds", self.retry)
yield from asyncio.sleep(self.retry)
await asyncio.sleep(self.retry)
class Filter:

View File

@ -6,8 +6,8 @@ import atexit
import os
from artiq.protocols.pc_rpc import Server
from artiq.protocols.sync_struct import Notifier, Publisher, process_mod
from artiq.protocols.file_db import FlatFileDB
from artiq.protocols.sync_struct import Notifier, Publisher
from artiq.master.databases import DeviceDB, DatasetDB
from artiq.master.scheduler import Scheduler
from artiq.master.worker_db import get_last_rid
from artiq.master.repository import FilesystemBackend, GitBackend, Repository
@ -27,10 +27,10 @@ def get_argparser():
"--port-control", default=3251, type=int,
help="TCP port to listen to for control (default: %(default)d)")
group = parser.add_argument_group("databases")
group.add_argument("-d", "--ddb", default="ddb.pyon",
help="device database file")
group.add_argument("-p", "--pdb", default="pdb.pyon",
help="parameter database file")
group.add_argument("--device-db", default="device_db.pyon",
help="device database file (default: '%(default)s')")
group.add_argument("--dataset-db", default="dataset_db.pyon",
help="dataset file (default: '%(default)s')")
group = parser.add_argument_group("repository")
group.add_argument(
"-g", "--git", default=False, action="store_true",
@ -64,24 +64,25 @@ def main():
loop = asyncio.get_event_loop()
atexit.register(lambda: loop.close())
ddb = FlatFileDB(args.ddb)
pdb = FlatFileDB(args.pdb)
rtr = Notifier(dict())
device_db = DeviceDB(args.device_db)
dataset_db = DatasetDB(args.dataset_db)
dataset_db.start()
atexit.register(lambda: loop.run_until_complete(dataset_db.stop()))
log = Log(1000)
if args.git:
repo_backend = GitBackend(args.repository)
else:
repo_backend = FilesystemBackend(args.repository)
repository = Repository(repo_backend, log.log)
repository = Repository(repo_backend, device_db.get_device_db, log.log)
atexit.register(repository.close)
repository.scan_async()
worker_handlers = {
"get_device": ddb.get,
"get_parameter": pdb.get,
"set_parameter": pdb.set,
"update_rt_results": lambda mod: process_mod(rtr, mod),
"get_device_db": device_db.get_device_db,
"get_device": device_db.get,
"get_dataset": dataset_db.get,
"update_dataset": dataset_db.update,
"log": log.log
}
scheduler = Scheduler(get_last_rid() + 1, worker_handlers, repo_backend)
@ -90,8 +91,8 @@ def main():
atexit.register(lambda: loop.run_until_complete(scheduler.stop()))
server_control = Server({
"master_ddb": ddb,
"master_pdb": pdb,
"master_device_db": device_db,
"master_dataset_db": dataset_db,
"master_schedule": scheduler,
"master_repository": repository
})
@ -101,9 +102,8 @@ def main():
server_notify = Publisher({
"schedule": scheduler.notifier,
"devices": ddb.data,
"parameters": pdb.data,
"rt_results": rtr,
"devices": device_db.data,
"datasets": dataset_db.data,
"explist": repository.explist,
"log": log.data
})

View File

@ -4,16 +4,14 @@
import argparse
import sys
import time
from operator import itemgetter
from itertools import chain
import logging
import h5py
from artiq.language.environment import EnvExperiment
from artiq.protocols.file_db import FlatFileDB
from artiq.master.worker_db import DeviceManager, ResultDB
from artiq.master.databases import DeviceDB, DatasetDB
from artiq.master.worker_db import DeviceManager, DatasetManager
from artiq.coredevice.core import CompileError
from artiq.compiler.embedding import ObjectMap
from artiq.compiler.targets import OR1KTarget
@ -24,8 +22,8 @@ logger = logging.getLogger(__name__)
class ELFRunner(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_argument("file")
self.setattr_device("core")
self.setattr_argument("file")
def run(self):
with open(self.file, "rb") as f:
@ -38,11 +36,6 @@ class ELFRunner(EnvExperiment):
lambda addresses: target.symbolize(kernel_library, addresses))
class SimpleParamLogger:
def set(self, timestamp, name, value):
logger.info("Parameter change: {} = {}".format(name, value))
class DummyScheduler:
def __init__(self):
self.next_rid = 0
@ -68,10 +61,10 @@ def get_argparser(with_file=True):
description="Local experiment running tool")
verbosity_args(parser)
parser.add_argument("-d", "--ddb", default="ddb.pyon",
help="device database file")
parser.add_argument("-p", "--pdb", default="pdb.pyon",
help="parameter database file")
parser.add_argument("--device-db", default="device_db.pyon",
help="device database file (default: '%(default)s')")
parser.add_argument("--dataset-db", default="dataset_db.pyon",
help="dataset file (default: '%(default)s')")
parser.add_argument("-e", "--experiment", default=None,
help="experiment to run")
@ -87,7 +80,7 @@ def get_argparser(with_file=True):
return parser
def _build_experiment(dmgr, pdb, rdb, args):
def _build_experiment(device_mgr, dataset_mgr, args):
if hasattr(args, "file"):
if args.file.endswith(".elf"):
if args.arguments:
@ -95,7 +88,7 @@ def _build_experiment(dmgr, pdb, rdb, args):
if args.experiment:
raise ValueError("experiment-by-name not supported "
"for ELF kernels")
return ELFRunner(dmgr, pdb, rdb, file=args.file)
return ELFRunner(device_mgr, dataset_mgr, file=args.file)
else:
module = file_import(args.file, prefix="artiq_run_")
file = args.file
@ -109,22 +102,21 @@ def _build_experiment(dmgr, pdb, rdb, args):
"experiment": args.experiment,
"arguments": arguments
}
dmgr.virtual_devices["scheduler"].expid = expid
return exp(dmgr, pdb, rdb, **arguments)
device_mgr.virtual_devices["scheduler"].expid = expid
return exp(device_mgr, dataset_mgr, **arguments)
def run(with_file=False):
args = get_argparser(with_file).parse_args()
init_logger(args)
dmgr = DeviceManager(FlatFileDB(args.ddb),
virtual_devices={"scheduler": DummyScheduler()})
pdb = FlatFileDB(args.pdb)
pdb.hooks.append(SimpleParamLogger())
rdb = ResultDB()
device_mgr = DeviceManager(DeviceDB(args.device_db),
virtual_devices={"scheduler": DummyScheduler()})
dataset_db = DatasetDB(args.dataset_db)
dataset_mgr = DatasetManager(dataset_db)
try:
exp_inst = _build_experiment(dmgr, pdb, rdb, args)
exp_inst = _build_experiment(device_mgr, dataset_mgr, args)
exp_inst.prepare()
exp_inst.run()
exp_inst.analyze()
@ -132,15 +124,15 @@ def run(with_file=False):
print(error.render_string(colored=True), file=sys.stderr)
return
finally:
dmgr.close_devices()
device_mgr.close_devices()
if args.hdf5 is not None:
with h5py.File(args.hdf5, "w") as f:
rdb.write_hdf5(f)
elif rdb.rt.read or rdb.nrt:
r = chain(rdb.rt.read.items(), rdb.nrt.items())
for k, v in sorted(r, key=itemgetter(0)):
dataset_mgr.write_hdf5(f)
else:
for k, v in sorted(dataset_mgr.local.items(), key=itemgetter(0)):
print("{}: {}".format(k, v))
dataset_db.save()
def main():

View File

@ -36,13 +36,13 @@ class _AD9xxx(Module):
ftws = [Signal(32) for i in range(nchannels)]
for c, ftw in enumerate(ftws):
if flen(pads.d) == 8:
self.sync.rio += \
self.sync.rio_phy += \
If(selected(c), [
If(current_address == ftw_base+i,
ftw[i*8:(i+1)*8].eq(current_data))
for i in range(4)])
elif flen(pads.d) == 16:
self.sync.rio += \
self.sync.rio_phy += \
If(selected(c), [
If(current_address == ftw_base+2*i,
ftw[i*16:(i+1)*16].eq(current_data))
@ -51,7 +51,7 @@ class _AD9xxx(Module):
raise NotImplementedError
# FTW to probe on FUD
self.sync.rio += If(current_address == 2**flen(pads.a), [
self.sync.rio_phy += If(current_address == 2**flen(pads.a), [
If(selected(c), probe.eq(ftw))
for c, (probe, ftw) in enumerate(zip(self.probes, ftws))])

View File

@ -5,19 +5,19 @@ _help = """
This is an interactive Python console.
The following functions are available:
get_parameter(key)
set_parameter(key, value) [asynchronous update]
get_result(key) [real-time results only]
get_dataset(key)
set_dataset(key, value, persist=False) [asynchronous update]
del_dataset(key) [asynchronous update]
"""
class ConsoleDock(dockarea.Dock):
def __init__(self, get_parameter, set_parameter, get_result):
def __init__(self, get_dataset, set_dataset, del_dataset):
dockarea.Dock.__init__(self, "Console", size=(1000, 300))
ns = {
"get_parameter": get_parameter,
"set_parameter": set_parameter,
"get_result": get_result
"get_dataset": get_dataset,
"set_dataset": set_dataset,
"del_dataset": del_dataset
}
c = console.ConsoleWidget(namespace=ns, text=_help)
self.addWidget(c)

View File

@ -8,16 +8,17 @@ from pyqtgraph import dockarea
from pyqtgraph import LayoutWidget
from artiq.protocols.sync_struct import Subscriber
from artiq.gui.tools import DictSyncModel, short_format
from artiq.tools import short_format
from artiq.gui.tools import DictSyncModel
from artiq.gui.displays import *
logger = logging.getLogger(__name__)
class ResultsModel(DictSyncModel):
class DatasetsModel(DictSyncModel):
def __init__(self, parent, init):
DictSyncModel.__init__(self, ["Result", "Value"],
DictSyncModel.__init__(self, ["Dataset", "Persistent", "Value"],
parent, init)
def sort_key(self, k, v):
@ -27,7 +28,9 @@ class ResultsModel(DictSyncModel):
if column == 0:
return k
elif column == 1:
return short_format(v)
return "Y" if v[0] else "N"
elif column == 2:
return short_format(v[1])
else:
raise ValueError
@ -38,23 +41,28 @@ def _get_display_type_name(display_cls):
return name
class ResultsDock(dockarea.Dock):
class DatasetsDock(dockarea.Dock):
def __init__(self, dialog_parent, dock_area):
dockarea.Dock.__init__(self, "Results", size=(1500, 500))
dockarea.Dock.__init__(self, "Datasets", size=(1500, 500))
self.dialog_parent = dialog_parent
self.dock_area = dock_area
grid = LayoutWidget()
self.addWidget(grid)
self.search = QtGui.QLineEdit()
self.search.setPlaceholderText("search...")
self.search.editingFinished.connect(self._search_datasets)
grid.addWidget(self.search, 0, 0)
self.table = QtGui.QTableView()
self.table.setSelectionMode(QtGui.QAbstractItemView.NoSelection)
self.table.horizontalHeader().setResizeMode(
QtGui.QHeaderView.ResizeToContents)
grid.addWidget(self.table, 0, 0)
grid.addWidget(self.table, 1, 0)
add_display_box = QtGui.QGroupBox("Add display")
grid.addWidget(add_display_box, 0, 1)
grid.addWidget(add_display_box, 1, 1)
display_grid = QtGui.QGridLayout()
add_display_box.setLayout(display_grid)
@ -65,24 +73,37 @@ class ResultsDock(dockarea.Dock):
self.displays = dict()
def get_result(self, key):
return self.table_model.backing_store[key]
def _search_datasets(self):
model = self.table_model
search = self.search.displayText()
for row in range(model.rowCount(model.index(0, 0))):
index = model.index(row, 0)
dataset = model.data(index, QtCore.Qt.DisplayRole)
if search in dataset:
self.table.showRow(row)
else:
self.table.hideRow(row)
@asyncio.coroutine
def sub_connect(self, host, port):
self.subscriber = Subscriber("rt_results", self.init_results_model,
def get_dataset(self, key):
return self.table_model.backing_store[key][1]
async def sub_connect(self, host, port):
self.subscriber = Subscriber("datasets", self.init_datasets_model,
self.on_mod)
yield from self.subscriber.connect(host, port)
await self.subscriber.connect(host, port)
@asyncio.coroutine
def sub_close(self):
yield from self.subscriber.close()
async def sub_close(self):
await self.subscriber.close()
def init_results_model(self, init):
self.table_model = ResultsModel(self.table, init)
def init_datasets_model(self, init):
self.table_model = DatasetsModel(self.table, init)
self.table.setModel(self.table_model)
return self.table_model
def update_display_data(self, dsp):
dsp.update_data({k: self.table_model.backing_store[k][1]
for k in dsp.data_sources()})
def on_mod(self, mod):
if mod["action"] == "init":
for display in self.displays.values():
@ -98,7 +119,7 @@ class ResultsDock(dockarea.Dock):
for display in self.displays.values():
if source in display.data_sources():
display.update_data(self.table_model.backing_store)
self.update_display_data(display)
def create_dialog(self, ty):
dlg_class = display_types[ty][0]
@ -113,7 +134,7 @@ class ResultsDock(dockarea.Dock):
dsp_class = display_types[ty][1]
dsp = dsp_class(name, settings)
self.displays[name] = dsp
dsp.update_data(self.table_model.backing_store)
self.update_display_data(dsp)
def on_close():
del self.displays[name]

View File

@ -6,7 +6,7 @@ from pyqtgraph import LayoutWidget
from artiq.protocols.sync_struct import Subscriber
from artiq.protocols import pyon
from artiq.gui.tools import DictSyncModel
from artiq.gui.tools import si_prefix, DictSyncModel
from artiq.gui.scan import ScanController
@ -74,26 +74,28 @@ class _EnumerationEntry(QtGui.QComboBox):
class _NumberEntry(QtGui.QDoubleSpinBox):
def __init__(self, procdesc):
QtGui.QDoubleSpinBox.__init__(self)
self.scale = procdesc["scale"]
self.setDecimals(procdesc["ndecimals"])
self.setSingleStep(procdesc["step"])
self.setSingleStep(procdesc["step"]/self.scale)
if procdesc["min"] is not None:
self.setMinimum(procdesc["min"])
self.setMinimum(procdesc["min"]/self.scale)
else:
self.setMinimum(float("-inf"))
if procdesc["max"] is not None:
self.setMaximum(procdesc["max"])
self.setMaximum(procdesc["max"]/self.scale)
else:
self.setMaximum(float("inf"))
if procdesc["unit"]:
self.setSuffix(" " + procdesc["unit"])
suffix = si_prefix(self.scale) + procdesc["unit"]
if suffix:
self.setSuffix(" " + suffix)
if "default" in procdesc:
self.set_argument_value(procdesc["default"])
def get_argument_value(self):
return self.value()
return self.value()*self.scale
def set_argument_value(self, value):
self.setValue(value)
self.setValue(value/self.scale)
class _StringEntry(QtGui.QLineEdit):
@ -300,23 +302,20 @@ class ExplorerDock(dockarea.Dock):
def enable_duedate(self):
self.datetime_en.setChecked(True)
@asyncio.coroutine
def sub_connect(self, host, port):
async def sub_connect(self, host, port):
self.explist_subscriber = Subscriber("explist",
self.init_explist_model)
yield from self.explist_subscriber.connect(host, port)
await self.explist_subscriber.connect(host, port)
@asyncio.coroutine
def sub_close(self):
yield from self.explist_subscriber.close()
async def sub_close(self):
await self.explist_subscriber.close()
def init_explist_model(self, init):
self.explist_model = _ExplistModel(self, self.el, init)
self.el.setModel(self.explist_model)
return self.explist_model
@asyncio.coroutine
def submit(self, pipeline_name, file, class_name, arguments,
async def submit(self, pipeline_name, file, class_name, arguments,
priority, due_date, flush):
expid = {
"repo_rev": None,
@ -324,8 +323,8 @@ class ExplorerDock(dockarea.Dock):
"class_name": class_name,
"arguments": arguments,
}
rid = yield from self.schedule_ctl.submit(pipeline_name, expid,
priority, due_date, flush)
rid = await self.schedule_ctl.submit(pipeline_name, expid,
priority, due_date, flush)
self.status_bar.showMessage("Submitted RID {}".format(rid))
def submit_clicked(self):
@ -338,7 +337,10 @@ class ExplorerDock(dockarea.Dock):
arguments = self.argeditor.get_argument_values(True)
if arguments is None:
return
asyncio.async(self.submit(self.pipeline.text(),
expinfo["file"], expinfo["class_name"],
arguments, self.priority.value(),
due_date, self.flush.isChecked()))
asyncio.ensure_future(self.submit(self.pipeline.text(),
expinfo["file"],
expinfo["class_name"],
arguments,
self.priority.value(),
due_date,
self.flush.isChecked()))

View File

@ -41,14 +41,12 @@ class LogDock(dockarea.Dock):
self.addWidget(self.log)
self.scroll_at_bottom = False
@asyncio.coroutine
def sub_connect(self, host, port):
async def sub_connect(self, host, port):
self.subscriber = Subscriber("log", self.init_log_model)
yield from self.subscriber.connect(host, port)
await self.subscriber.connect(host, port)
@asyncio.coroutine
def sub_close(self):
yield from self.subscriber.close()
async def sub_close(self):
await self.subscriber.close()
def rows_inserted_before(self):
scrollbar = self.log.verticalScrollBar()

View File

@ -23,9 +23,9 @@ _mode_enc = {
class _TTLWidget(QtGui.QFrame):
def __init__(self, send_to_device, channel, force_out, title):
self.send_to_device = send_to_device
def __init__(self, channel, send_to_device, force_out, title):
self.channel = channel
self.send_to_device = send_to_device
self.force_out = force_out
QtGui.QFrame.__init__(self)
@ -119,7 +119,8 @@ class _TTLWidget(QtGui.QFrame):
class _DDSWidget(QtGui.QFrame):
def __init__(self, sysclk, title):
def __init__(self, channel, sysclk, title):
self.channel = channel
self.sysclk = sysclk
QtGui.QFrame.__init__(self)
@ -163,9 +164,11 @@ class _DeviceManager:
self[k] = v
def __setitem__(self, k, v):
self.ddb[k] = v
if k in self.ttl_widgets:
del self[k]
if k in self.dds_widgets:
del self[k]
self.ddb[k] = v
if not isinstance(v, dict):
return
try:
@ -176,14 +179,15 @@ class _DeviceManager:
if v["module"] == "artiq.coredevice.ttl":
channel = v["arguments"]["channel"]
force_out = v["class"] == "TTLOut"
self.ttl_widgets[channel] = _TTLWidget(
self.send_to_device, channel, force_out, title)
self.ttl_widgets[k] = _TTLWidget(
channel, self.send_to_device, force_out, title)
self.ttl_cb()
if (v["module"] == "artiq.coredevice.dds"
and v["class"] in {"AD9858", "AD9914"}):
channel = v["arguments"]["channel"]
sysclk = v["arguments"]["sysclk"]
self.dds_widgets[channel] = _DDSWidget(sysclk, title)
self.dds_widgets[channel] = _DDSWidget(
channel, sysclk, title)
self.dds_cb()
except KeyError:
pass
@ -191,8 +195,13 @@ class _DeviceManager:
def __delitem__(self, k):
del self.ddb[k]
if k in self.ttl_widgets:
self.ttl_widgets[k].deleteLater()
del self.ttl_widgets[k]
self.ttl_cb()
if k in self.dds_widgets:
self.dds_widgets[k].deleteLater()
del self.dds_widgets[k]
self.dds_cb()
def get_core_addr(self):
try:
@ -232,26 +241,24 @@ class MonInj(TaskObject):
self.dm = _DeviceManager(self.send_to_device, dict())
self.transport = None
@asyncio.coroutine
def start(self, server, port):
async def start(self, server, port):
loop = asyncio.get_event_loop()
yield from loop.create_datagram_endpoint(lambda: self,
await loop.create_datagram_endpoint(lambda: self,
family=socket.AF_INET)
try:
yield from self.subscriber.connect(server, port)
await self.subscriber.connect(server, port)
try:
TaskObject.start(self)
except:
yield from self.subscriber.close()
await self.subscriber.close()
raise
except:
self.transport.close()
raise
@asyncio.coroutine
def stop(self):
yield from TaskObject.stop(self)
yield from self.subscriber.close()
async def stop(self):
await TaskObject.stop(self)
await self.subscriber.close()
if self.transport is not None:
self.transport.close()
self.transport = None
@ -263,16 +270,17 @@ class MonInj(TaskObject):
try:
ttl_levels, ttl_oes, ttl_overrides = \
struct.unpack(">QQQ", data[:8*3])
for channel, w in self.dm.ttl_widgets.items():
for w in self.dm.ttl_widgets.values():
channel = w.channel
w.set_value(ttl_levels & (1 << channel),
ttl_oes & (1 << channel),
ttl_overrides & (1 << channel))
dds_data = data[8*3:]
ndds = len(dds_data)//4
ftws = struct.unpack(">" + "I"*ndds, dds_data)
for channel, w in self.dm.dds_widgets.items():
for w in self.dm.dds_widgets.values():
try:
ftw = ftws[channel]
ftw = ftws[w.channel]
except KeyError:
pass
else:
@ -295,10 +303,9 @@ class MonInj(TaskObject):
else:
self.transport.sendto(data, (ca, 3250))
@asyncio.coroutine
def _do(self):
async def _do(self):
while True:
yield from asyncio.sleep(0.2)
await asyncio.sleep(0.2)
# MONINJ_REQ_MONITOR
self.send_to_device(b"\x01")

View File

@ -1,74 +0,0 @@
import asyncio
from quamash import QtGui, QtCore
from pyqtgraph import dockarea
from pyqtgraph import LayoutWidget
from artiq.protocols.sync_struct import Subscriber
from artiq.gui.tools import DictSyncModel, short_format
class ParametersModel(DictSyncModel):
def __init__(self, parent, init):
DictSyncModel.__init__(self, ["Parameter", "Value"],
parent, init)
def sort_key(self, k, v):
return k
def convert(self, k, v, column):
if column == 0:
return k
elif column == 1:
return short_format(v)
else:
raise ValueError
class ParametersDock(dockarea.Dock):
def __init__(self):
dockarea.Dock.__init__(self, "Parameters", size=(400, 300))
grid = LayoutWidget()
self.addWidget(grid)
self.search = QtGui.QLineEdit()
self.search.setPlaceholderText("search...")
self.search.editingFinished.connect(self._search_parameters)
grid.addWidget(self.search, 0, 0)
self.table = QtGui.QTableView()
self.table.setSelectionMode(QtGui.QAbstractItemView.NoSelection)
self.table.horizontalHeader().setResizeMode(
QtGui.QHeaderView.ResizeToContents)
grid.addWidget(self.table, 1, 0)
def get_parameter(self, key):
return self.table_model.backing_store[key]
def _search_parameters(self):
model = self.table.model()
parentIndex = model.index(0, 0)
numRows = model.rowCount(parentIndex)
for row in range(numRows):
index = model.index(row, 0)
parameter = model.data(index, QtCore.Qt.DisplayRole)
if parameter.startswith(self.search.displayText()):
self.table.showRow(row)
else:
self.table.hideRow(row)
@asyncio.coroutine
def sub_connect(self, host, port):
self.subscriber = Subscriber("parameters", self.init_parameters_model)
yield from self.subscriber.connect(host, port)
@asyncio.coroutine
def sub_close(self):
yield from self.subscriber.close()
def init_parameters_model(self, init):
self.table_model = ParametersModel(self.table, init)
self.table.setModel(self.table_model)
return self.table_model

View File

@ -1,25 +1,28 @@
from quamash import QtGui
from pyqtgraph import LayoutWidget
from artiq.gui.tools import si_prefix
class _Range(LayoutWidget):
def __init__(self, global_min, global_max, global_step, unit, ndecimals):
def __init__(self, global_min, global_max, global_step, suffix, scale, ndecimals):
LayoutWidget.__init__(self)
self.scale = scale
def apply_properties(spinbox):
spinbox.setDecimals(ndecimals)
if global_min is not None:
spinbox.setMinimum(global_min)
spinbox.setMinimum(global_min/self.scale)
else:
spinbox.setMinimum(float("-inf"))
if global_max is not None:
spinbox.setMaximum(global_max)
spinbox.setMaximum(global_max/self.scale)
else:
spinbox.setMaximum(float("inf"))
if global_step is not None:
spinbox.setSingleStep(global_step)
if unit:
spinbox.setSuffix(" " + unit)
spinbox.setSingleStep(global_step/self.scale)
if suffix:
spinbox.setSuffix(" " + suffix)
self.addWidget(QtGui.QLabel("Min:"), 0, 0)
self.min = QtGui.QDoubleSpinBox()
@ -38,8 +41,8 @@ class _Range(LayoutWidget):
self.addWidget(self.npoints, 0, 5)
def set_values(self, min, max, npoints):
self.min.setValue(min)
self.max.setValue(max)
self.min.setValue(min/self.scale)
self.max.setValue(max/self.scale)
self.npoints.setValue(npoints)
def get_values(self):
@ -48,8 +51,8 @@ class _Range(LayoutWidget):
if min > max:
raise ValueError("Minimum scan boundary must be less than maximum")
return {
"min": min,
"max": max,
"min": min*self.scale,
"max": max*self.scale,
"npoints": self.npoints.value()
}
@ -61,33 +64,35 @@ class ScanController(LayoutWidget):
self.stack = QtGui.QStackedWidget()
self.addWidget(self.stack, 1, 0, colspan=4)
self.scale = procdesc["scale"]
gmin, gmax = procdesc["global_min"], procdesc["global_max"]
gstep = procdesc["global_step"]
unit = procdesc["unit"]
suffix = si_prefix(self.scale) + procdesc["unit"]
ndecimals = procdesc["ndecimals"]
self.v_noscan = QtGui.QDoubleSpinBox()
self.v_noscan.setDecimals(ndecimals)
if gmin is not None:
self.v_noscan.setMinimum(gmin)
self.v_noscan.setMinimum(gmin/self.scale)
else:
self.v_noscan.setMinimum(float("-inf"))
if gmax is not None:
self.v_noscan.setMaximum(gmax)
self.v_noscan.setMaximum(gmax/self.scale)
else:
self.v_noscan.setMaximum(float("inf"))
self.v_noscan.setSingleStep(gstep)
if unit:
self.v_noscan.setSuffix(" " + unit)
self.v_noscan.setSingleStep(gstep/self.scale)
if suffix:
self.v_noscan.setSuffix(" " + suffix)
self.v_noscan_gr = LayoutWidget()
self.v_noscan_gr.addWidget(QtGui.QLabel("Value:"), 0, 0)
self.v_noscan_gr.addWidget(self.v_noscan, 0, 1)
self.stack.addWidget(self.v_noscan_gr)
self.v_linear = _Range(gmin, gmax, gstep, unit, ndecimals)
self.v_linear = _Range(gmin, gmax, gstep, suffix, self.scale, ndecimals)
self.stack.addWidget(self.v_linear)
self.v_random = _Range(gmin, gmax, gstep, unit, ndecimals)
self.v_random = _Range(gmin, gmax, gstep, suffix, self.scale, ndecimals)
self.stack.addWidget(self.v_random)
self.v_explicit = QtGui.QLineEdit()
@ -124,7 +129,7 @@ class ScanController(LayoutWidget):
def get_argument_value(self):
if self.noscan.isChecked():
return {"ty": "NoScan", "value": self.v_noscan.value()}
return {"ty": "NoScan", "value": self.v_noscan.value()*self.scale}
elif self.linear.isChecked():
d = {"ty": "LinearScan"}
d.update(self.v_linear.get_values())
@ -140,7 +145,7 @@ class ScanController(LayoutWidget):
def set_argument_value(self, d):
if d["ty"] == "NoScan":
self.noscan.setChecked(True)
self.v_noscan.setValue(d["value"])
self.v_noscan.setValue(d["value"]/self.scale)
elif d["ty"] == "LinearScan":
self.linear.setChecked(True)
self.v_linear.set_values(d["min"], d["max"], d["npoints"])

View File

@ -1,11 +1,13 @@
import asyncio
import time
from functools import partial
from quamash import QtGui, QtCore
from pyqtgraph import dockarea
from artiq.protocols.sync_struct import Subscriber
from artiq.gui.tools import elide, DictSyncModel
from artiq.gui.tools import DictSyncModel
from artiq.tools import elide
class _ScheduleModel(DictSyncModel):
@ -71,32 +73,36 @@ class ScheduleDock(dockarea.Dock):
self.addWidget(self.table)
self.table.setContextMenuPolicy(QtCore.Qt.ActionsContextMenu)
request_termination_action = QtGui.QAction("Request termination", self.table)
request_termination_action.triggered.connect(partial(self.delete_clicked, True))
self.table.addAction(request_termination_action)
delete_action = QtGui.QAction("Delete", self.table)
delete_action.triggered.connect(self.delete_clicked)
delete_action.triggered.connect(partial(self.delete_clicked, False))
self.table.addAction(delete_action)
@asyncio.coroutine
def sub_connect(self, host, port):
self.subscriber = Subscriber("schedule", self.init_schedule_model)
yield from self.subscriber.connect(host, port)
@asyncio.coroutine
def sub_close(self):
yield from self.subscriber.close()
async def sub_connect(self, host, port):
self.subscriber = Subscriber("schedule", self.init_schedule_model)
await self.subscriber.connect(host, port)
async def sub_close(self):
await self.subscriber.close()
def init_schedule_model(self, init):
self.table_model = _ScheduleModel(self.table, init)
self.table.setModel(self.table_model)
return self.table_model
@asyncio.coroutine
def delete(self, rid):
yield from self.schedule_ctl.delete(rid)
async def delete(self, rid, graceful):
if graceful:
await self.schedule_ctl.request_termination(rid)
else:
await self.schedule_ctl.delete(rid)
def delete_clicked(self):
def delete_clicked(self, graceful):
idx = self.table.selectedIndexes()
if idx:
row = idx[0].row()
rid = self.table_model.row_to_key[row]
self.status_bar.showMessage("Deleted RID {}".format(rid))
asyncio.async(self.delete(rid))
asyncio.ensure_future(self.delete(rid, graceful))

View File

@ -69,11 +69,10 @@ class StateManager(TaskObject):
exc_info=True)
pyon.store_file(self.filename, data)
@asyncio.coroutine
def _do(self):
async def _do(self):
try:
while True:
yield from asyncio.sleep(self.autosave_period)
await asyncio.sleep(self.autosave_period)
self.save()
finally:
self.save()

View File

@ -2,39 +2,21 @@ from quamash import QtCore
import numpy as np
def elide(s, maxlen):
elided = False
if len(s) > maxlen:
s = s[:maxlen]
elided = True
def si_prefix(scale):
try:
idx = s.index("\n")
except ValueError:
pass
else:
s = s[:idx]
elided = True
if elided:
maxlen -= 3
if len(s) > maxlen:
s = s[:maxlen]
s += "..."
return s
def short_format(v):
if v is None:
return "None"
t = type(v)
if np.issubdtype(t, int) or np.issubdtype(t, float):
return str(v)
elif t is str:
return "\"" + elide(v, 15) + "\""
else:
r = t.__name__
if t is list or t is dict or t is set:
r += " ({})".format(len(v))
return r
return {
1e-12: "p",
1e-9: "n",
1e-6: "u",
1e-3: "m",
1.0: "",
1e3: "k",
1e6: "M",
1e9: "G",
1e12: "T"
}[scale]
except KeyError:
return "[x{}]".format(scale)
class _SyncSubstruct:

View File

@ -13,7 +13,8 @@ from artiq.coredevice.runtime import source_loader
__all__ = ["host_int", "int",
"kernel", "portable", "syscall",
"set_time_manager", "set_watchdog_factory",
"ARTIQException"]
"ARTIQException",
"TerminationRequested"]
# global namespace for kernels
kernel_globals = (
@ -350,6 +351,11 @@ def watchdog(timeout):
return _watchdog_factory(timeout)
class TerminationRequested(Exception):
"""Raised by ``pause`` when the user has requested termination."""
pass
class ARTIQException(Exception):
"""Base class for exceptions raised or passed through the core device."""

View File

@ -73,16 +73,22 @@ class NumberValue(_SimpleArgProcessor):
:param unit: A string representing the unit of the value, for user
interface (UI) purposes.
:param scale: The scale of value for UI purposes. The corresponding SI
prefix is shown in front of the unit, and the displayed value is
divided by the scale.
:param step: The step with which the value should be modified by up/down
buttons in a UI.
buttons in a UI. The default is the scale divided by 10.
:param min: The minimum value of the argument.
:param max: The maximum value of the argument.
:param ndecimals: The number of decimals a UI should use.
"""
def __init__(self, default=NoDefault, unit="", step=1.0,
min=None, max=None, ndecimals=2):
def __init__(self, default=NoDefault, unit="", scale=1.0,
step=None, min=None, max=None, ndecimals=2):
if step is None:
step = scale/10.0
_SimpleArgProcessor.__init__(self, default)
self.unit = unit
self.scale = scale
self.step = step
self.min = min
self.max = max
@ -91,6 +97,7 @@ class NumberValue(_SimpleArgProcessor):
def describe(self):
d = _SimpleArgProcessor.describe(self)
d["unit"] = self.unit
d["scale"] = self.scale
d["step"] = self.step
d["min"] = self.min
d["max"] = self.max
@ -106,15 +113,13 @@ class StringValue(_SimpleArgProcessor):
class HasEnvironment:
"""Provides methods to manage the environment of an experiment (devices,
parameters, results, arguments)."""
def __init__(self, dmgr=None, pdb=None, rdb=None, *, parent=None,
param_override=dict(), default_arg_none=False, **kwargs):
def __init__(self, device_mgr=None, dataset_mgr=None, *, parent=None,
default_arg_none=False, **kwargs):
self.requested_args = OrderedDict()
self.__dmgr = dmgr
self.__pdb = pdb
self.__rdb = rdb
self.__device_mgr = device_mgr
self.__dataset_mgr = dataset_mgr
self.__parent = parent
self.__param_override = param_override
self.__default_arg_none = default_arg_none
self.__kwargs = kwargs
@ -136,17 +141,16 @@ class HasEnvironment:
are set to ``None``."""
raise NotImplementedError
def dbs(self):
"""Returns the device manager, the parameter database and the result
database, in this order.
def managers(self):
"""Returns the device manager and the dataset manager, in this order.
This is the same order in which the constructor takes them, allowing
sub-objects to be created with this idiom to pass the environment
around: ::
sub_object = SomeLibrary(*self.dbs())
sub_object = SomeLibrary(*self.managers())
"""
return self.__dmgr, self.__pdb, self.__rdb
return self.__device_mgr, self.__dataset_mgr
def get_argument(self, key, processor=None, group=None):
"""Retrieves and returns the value of an argument.
@@ -177,94 +181,54 @@ class HasEnvironment:
raise
return processor.process(argval)
def attr_argument(self, key, processor=None, group=None):
def setattr_argument(self, key, processor=None, group=None):
"""Sets an argument as attribute. The names of the argument and of the
attribute are the same."""
setattr(self, key, self.get_argument(key, processor, group))
def get_device_db(self):
"""Returns the full contents of the device database."""
if self.__parent is not None:
return self.__parent.get_device_db()
return self.__device_mgr.get_device_db()
def get_device(self, key):
"""Creates and returns a device driver."""
if self.__parent is not None:
return self.__parent.get_device(key)
if self.__dmgr is None:
if self.__device_mgr is None:
raise ValueError("Device manager not present")
return self.__dmgr.get(key)
return self.__device_mgr.get(key)
def attr_device(self, key):
def setattr_device(self, key):
"""Sets a device driver as attribute. The names of the device driver
and of the attribute are the same."""
setattr(self, key, self.get_device(key))
def get_parameter(self, key, default=NoDefault):
"""Retrieves and returns a parameter."""
if self.__parent is not None and key not in self.__param_override:
return self.__parent.get_parameter(key, default)
if self.__pdb is None:
raise ValueError("Parameter database not present")
if key in self.__param_override:
return self.__param_override[key]
def set_dataset(self, key, value,
broadcast=False, persist=False, save=True):
if self.__parent is not None:
self.__parent.set_dataset(key, value, broadcast, persist, save)
return
if self.__dataset_mgr is None:
raise ValueError("Dataset manager not present")
return self.__dataset_mgr.set(key, value, broadcast, persist, save)
def get_dataset(self, key, default=NoDefault):
if self.__parent is not None:
return self.__parent.get_dataset(key, default)
if self.__dataset_mgr is None:
raise ValueError("Dataset manager not present")
try:
return self.__pdb.get(key)
return self.__dataset_mgr.get(key)
except KeyError:
if default is not NoDefault:
return default
else:
if default is NoDefault:
raise
else:
return default
def attr_parameter(self, key, default=NoDefault):
"""Sets a parameter as attribute. The names of the argument and of the
parameter are the same."""
setattr(self, key, self.get_parameter(key, default))
def set_parameter(self, key, value):
"""Writes the value of a parameter into the parameter database."""
if self.__parent is not None:
self.__parent.set_parameter(key, value)
return
if self.__pdb is None:
raise ValueError("Parameter database not present")
self.__pdb.set(key, value)
def set_result(self, key, value, realtime=False, store=True):
"""Writes the value of a result.
:param realtime: Marks the result as real-time, making it immediately
available to clients such as the user interface. Returns a
``Notifier`` instance that can be used to modify mutable results
(such as lists) and synchronize the modifications with the clients.
:param store: Defines if the result should be stored permanently,
e.g. in HDF5 output. Default is to store.
"""
if self.__parent is not None:
self.__parent.set_result(key, value, realtime, store)
return
if self.__rdb is None:
raise ValueError("Result database not present")
if realtime:
if key in self.__rdb.nrt:
raise ValueError("Result is already non-realtime")
self.__rdb.rt[key] = value
notifier = self.__rdb.rt[key]
notifier.kernel_attr_init = False
self.__rdb.set_store(key, store)
return notifier
else:
if key in self.__rdb.rt.read:
raise ValueError("Result is already realtime")
self.__rdb.nrt[key] = value
self.__rdb.set_store(key, store)
def get_result(self, key):
"""Retrieves the value of a result.
There is no difference between real-time and non-real-time results
(this function does not return ``Notifier`` instances).
"""
if self.__parent is not None:
return self.__parent.get_result(key)
if self.__rdb is None:
raise ValueError("Result database not present")
return self.__rdb.get(key)
def setattr_dataset(self, key, default=NoDefault):
setattr(self, key, self.get_dataset(key, default))
class Experiment:
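The dataset methods above replace the old parameter and result APIs. A hedged usage sketch, assuming the usual from artiq import * experiment namespace used elsewhere in this commit:

from artiq import *

class Flop(EnvExperiment):
    def build(self):
        self.setattr_device("core")
        # fetch a dataset into an attribute, with a fallback default
        self.setattr_dataset("contrast", 0.5)

    def run(self):
        # broadcast=True pushes the value to connected clients;
        # persist=True keeps it across master restarts (implies broadcast);
        # save=True (the default) records it in this run's HDF5 output
        self.set_dataset("contrast", 0.8, broadcast=True, persist=True)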

View File

@@ -47,6 +47,9 @@ class NoScan(ScanObject):
def __iter__(self):
return self._gen()
def __len__(self):
return 1
def describe(self):
return {"ty": "NoScan", "value": self.value}
@@ -70,6 +73,9 @@ class LinearScan(ScanObject):
def __iter__(self):
return self._gen()
def __len__(self):
return self.npoints
def describe(self):
return {"ty": "LinearScan",
"min": self.min, "max": self.max, "npoints": self.npoints}
@@ -79,6 +85,9 @@ class RandomScan(ScanObject):
"""A scan object that yields a fixed number of randomly ordered evenly
spaced values in a range."""
def __init__(self, min, max, npoints, seed=0):
self.min = min
self.max = max
self.npoints = npoints
self.sequence = list(LinearScan(min, max, npoints))
shuffle(self.sequence, Random(seed).random)
@@ -86,6 +95,9 @@ class RandomScan(ScanObject):
def __iter__(self):
return iter(self.sequence)
def __len__(self):
return self.npoints
def describe(self):
return {"ty": "RandomScan",
"min": self.min, "max": self.max, "npoints": self.npoints}
@@ -100,6 +112,9 @@ class ExplicitScan(ScanObject):
def __iter__(self):
return iter(self.sequence)
def __len__(self):
return len(self.sequence)
def describe(self):
return {"ty": "ExplicitScan", "sequence": self.sequence}
@@ -121,17 +136,24 @@ class Scannable:
range of its input widgets.
:param global_max: Same as global_min, but for the maximum value.
:param global_step: The step with which the value should be modified by
up/down buttons in a user interface.
up/down buttons in a user interface. The default is the scale divided
by 10.
:param unit: A string representing the unit of the scanned variable, for user
interface (UI) purposes.
:param scale: The scale of the value for UI purposes. The corresponding SI
prefix is shown in front of the unit, and the displayed value is
divided by the scale.
:param ndecimals: The number of decimals a UI should use.
"""
def __init__(self, default=NoDefault, unit="",
global_step=1.0, global_min=None, global_max=None,
def __init__(self, default=NoDefault, unit="", scale=1.0,
global_step=None, global_min=None, global_max=None,
ndecimals=2):
if global_step is None:
global_step = scale/10.0
if default is not NoDefault:
self.default_value = default
self.unit = unit
self.scale = scale
self.global_step = global_step
self.global_min = global_min
self.global_max = global_max
@@ -155,6 +177,7 @@ class Scannable:
if hasattr(self, "default_value"):
d["default"] = self.default_value.describe()
d["unit"] = self.unit
d["scale"] = self.scale
d["global_step"] = self.global_step
d["global_min"] = self.global_min
d["global_max"] = self.global_max

62
artiq/master/databases.py Normal file
View File

@@ -0,0 +1,62 @@
import asyncio
from artiq.protocols.sync_struct import Notifier, process_mod
from artiq.protocols import pyon
from artiq.tools import TaskObject
class DeviceDB:
def __init__(self, backing_file):
self.backing_file = backing_file
self.data = Notifier(pyon.load_file(self.backing_file))
def scan(self):
new_data = pyon.load_file(self.backing_file)
for k in list(self.data.read.keys()):
if k not in new_data:
del self.data[k]
for k in new_data.keys():
if k not in self.data.read or self.data.read[k] != new_data[k]:
self.data[k] = new_data[k]
def get_device_db(self):
return self.data.read
def get(self, key):
return self.data.read[key]
class DatasetDB(TaskObject):
def __init__(self, persist_file, autosave_period=30):
self.persist_file = persist_file
self.autosave_period = autosave_period
file_data = pyon.load_file(self.persist_file)
self.data = Notifier({k: (True, v) for k, v in file_data.items()})
def save(self):
data = {k: v[1] for k, v in self.data.read.items() if v[0]}
pyon.store_file(self.persist_file, data)
async def _do(self):
try:
while True:
await asyncio.sleep(self.autosave_period)
self.save()
finally:
self.save()
def get(self, key):
return self.data.read[key][1]
def update(self, mod):
process_mod(self.data, mod)
# convenience functions (update() can be used instead)
def set(self, key, value, persist=False):
self.data[key] = (persist, value)
def delete(self, key):
del self.data[key]
#
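A brief usage sketch for DatasetDB; the file name is hypothetical and must already contain a PYON dictionary:

from artiq.master.databases import DatasetDB

db = DatasetDB("dataset_db.pyon")          # existing entries load as persisted
db.set("detuning", 1.2e6, persist=True)    # stored as (True, 1.2e6)
db.set("scratch", [1, 2, 3])               # not written back to disk
print(db.get("detuning"))                  # -> 1200000.0
db.save()   # explicit save; start() autosaves every autosave_period seconds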

View File

@@ -12,17 +12,19 @@ from artiq.tools import exc_to_warning
logger = logging.getLogger(__name__)
@asyncio.coroutine
def _scan_experiments(wd, log):
async def _scan_experiments(wd, get_device_db, log):
r = dict()
for f in os.listdir(wd):
if f.endswith(".py"):
try:
worker = Worker({"log": lambda message: log("scan", message)})
worker = Worker({
"get_device_db": get_device_db,
"log": lambda message: log("scan", message)
})
try:
description = yield from worker.examine(os.path.join(wd, f))
description = await worker.examine(os.path.join(wd, f))
finally:
yield from worker.close()
await worker.close()
for class_name, class_desc in description.items():
name = class_desc["name"]
arguments = class_desc["arguments"]
@@ -54,8 +56,9 @@ def _sync_explist(target, source):
class Repository:
def __init__(self, backend, log_fn):
def __init__(self, backend, get_device_db_fn, log_fn):
self.backend = backend
self.get_device_db_fn = get_device_db_fn
self.log_fn = log_fn
self.cur_rev = self.backend.get_head_rev()
@@ -68,8 +71,7 @@ class Repository:
# The object cannot be used anymore after calling this method.
self.backend.release_rev(self.cur_rev)
@asyncio.coroutine
def scan(self, new_cur_rev=None):
async def scan(self, new_cur_rev=None):
if self._scanning:
return
self._scanning = True
@@ -79,14 +81,15 @@ class Repository:
wd, _ = self.backend.request_rev(new_cur_rev)
self.backend.release_rev(self.cur_rev)
self.cur_rev = new_cur_rev
new_explist = yield from _scan_experiments(wd, self.log_fn)
new_explist = await _scan_experiments(wd, self.get_device_db_fn,
self.log_fn)
_sync_explist(self.explist, new_explist)
finally:
self._scanning = False
def scan_async(self, new_cur_rev=None):
asyncio.async(exc_to_warning(self.scan(new_cur_rev)))
asyncio.ensure_future(exc_to_warning(self.scan(new_cur_rev)))
class FilesystemBackend:

View File

@@ -24,13 +24,12 @@ class RunStatus(Enum):
def _mk_worker_method(name):
@asyncio.coroutine
def worker_method(self, *args, **kwargs):
async def worker_method(self, *args, **kwargs):
if self.worker.closed.is_set():
return True
m = getattr(self.worker, name)
try:
return (yield from m(*args, **kwargs))
return await m(*args, **kwargs)
except Exception as e:
if isinstance(e, asyncio.CancelledError):
raise
@@ -58,6 +57,7 @@ class Run:
self.flush = flush
self.worker = Worker(pool.worker_handlers)
self.termination_requested = False
self._status = RunStatus.pending
@@ -97,19 +97,17 @@ class Run:
runnable = 1
return (runnable, self.priority, due_date_k, -self.rid)
@asyncio.coroutine
def close(self):
async def close(self):
# called through pool
yield from self.worker.close()
await self.worker.close()
del self._notifier[self.rid]
_build = _mk_worker_method("build")
@asyncio.coroutine
def build(self):
yield from self._build(self.rid, self.pipeline_name,
self.wd, self.expid,
self.priority)
async def build(self):
await self._build(self.rid, self.pipeline_name,
self.wd, self.expid,
self.priority)
prepare = _mk_worker_method("prepare")
run = _mk_worker_method("run")
@@ -154,13 +152,12 @@ class RunPool:
self.state_changed.notify()
return rid
@asyncio.coroutine
def delete(self, rid):
async def delete(self, rid):
# called through deleter
if rid not in self.runs:
return
run = self.runs[rid]
yield from run.close()
await run.close()
if "repo_rev" in run.expid:
self.repo_backend.release_rev(run.expid["repo_rev"])
del self.runs[rid]
@@ -203,14 +200,13 @@ class PrepareStage(TaskObject):
else:
return candidate.due_date - now
@asyncio.coroutine
def _do(self):
async def _do(self):
while True:
run = self._get_run()
if run is None:
yield from self.pool.state_changed.wait()
await self.pool.state_changed.wait()
elif isinstance(run, float):
yield from asyncio_wait_or_cancel([self.pool.state_changed.wait()],
await asyncio_wait_or_cancel([self.pool.state_changed.wait()],
timeout=run)
else:
if run.flush:
@@ -221,7 +217,7 @@ class PrepareStage(TaskObject):
for r in self.pool.runs.values()):
ev = [self.pool.state_changed.wait(),
run.worker.closed.wait()]
yield from asyncio_wait_or_cancel(
await asyncio_wait_or_cancel(
ev, return_when=asyncio.FIRST_COMPLETED)
if run.worker.closed.is_set():
break
@@ -229,8 +225,8 @@ class PrepareStage(TaskObject):
continue
run.status = RunStatus.preparing
try:
yield from run.build()
yield from run.prepare()
await run.build()
await run.prepare()
except:
logger.warning("got worker exception in prepare stage, "
"deleting RID %d",
@@ -255,8 +251,7 @@ class RunStage(TaskObject):
r = None
return r
@asyncio.coroutine
def _do(self):
async def _do(self):
stack = []
while True:
@@ -265,7 +260,7 @@ class RunStage(TaskObject):
next_irun is not None and
next_irun.priority_key() > stack[-1].priority_key()):
while next_irun is None:
yield from self.pool.state_changed.wait()
await self.pool.state_changed.wait()
next_irun = self._get_run()
stack.append(next_irun)
@@ -273,10 +268,15 @@
try:
if run.status == RunStatus.paused:
run.status = RunStatus.running
completed = yield from run.resume()
# clear "termination requested" flag now
# so that if it is set again during the resume, this
# results in another exception.
request_termination = run.termination_requested
run.termination_requested = False
completed = await run.resume(request_termination)
else:
run.status = RunStatus.running
completed = yield from run.run()
completed = await run.run()
except:
logger.warning("got worker exception in run stage, "
"deleting RID %d",
@@ -305,17 +305,16 @@ class AnalyzeStage(TaskObject):
r = None
return r
@asyncio.coroutine
def _do(self):
async def _do(self):
while True:
run = self._get_run()
while run is None:
yield from self.pool.state_changed.wait()
await self.pool.state_changed.wait()
run = self._get_run()
run.status = RunStatus.analyzing
try:
yield from run.analyze()
yield from run.write_results()
await run.analyze()
await run.write_results()
except:
logger.warning("got worker exception in analyze stage, "
"deleting RID %d",
@@ -337,18 +336,17 @@ class Pipeline:
self._run.start()
self._analyze.start()
@asyncio.coroutine
def stop(self):
async def stop(self):
# NB: restart of a stopped pipeline is not supported
yield from self._analyze.stop()
yield from self._run.stop()
yield from self._prepare.stop()
await self._analyze.stop()
await self._run.stop()
await self._prepare.stop()
class Deleter(TaskObject):
def __init__(self, pipelines):
self._pipelines = pipelines
self._queue = asyncio.JoinableQueue()
self._queue = asyncio.Queue()
def delete(self, rid):
logger.debug("delete request for RID %d", rid)
@@ -358,36 +356,32 @@ class Deleter(TaskObject):
break
self._queue.put_nowait(rid)
@asyncio.coroutine
def join(self):
yield from self._queue.join()
async def join(self):
await self._queue.join()
@asyncio.coroutine
def _delete(self, rid):
async def _delete(self, rid):
for pipeline in self._pipelines.values():
if rid in pipeline.pool.runs:
logger.debug("deleting RID %d...", rid)
yield from pipeline.pool.delete(rid)
await pipeline.pool.delete(rid)
logger.debug("deletion of RID %d completed", rid)
break
@asyncio.coroutine
def _gc_pipelines(self):
async def _gc_pipelines(self):
pipeline_names = list(self._pipelines.keys())
for name in pipeline_names:
if not self._pipelines[name].pool.runs:
logger.debug("garbage-collecting pipeline '%s'...", name)
yield from self._pipelines[name].stop()
await self._pipelines[name].stop()
del self._pipelines[name]
logger.debug("garbage-collection of pipeline '%s' completed",
name)
@asyncio.coroutine
def _do(self):
async def _do(self):
while True:
rid = yield from self._queue.get()
yield from self._delete(rid)
yield from self._gc_pipelines()
rid = await self._queue.get()
await self._delete(rid)
await self._gc_pipelines()
self._queue.task_done()
@@ -406,15 +400,14 @@ class Scheduler:
def start(self):
self._deleter.start()
@asyncio.coroutine
def stop(self):
async def stop(self):
# NB: restart of a stopped scheduler is not supported
self._terminated = True # prevent further runs from being created
for pipeline in self._pipelines.values():
for rid in pipeline.pool.runs.keys():
self._deleter.delete(rid)
yield from self._deleter.join()
yield from self._deleter.stop()
await self._deleter.join()
await self._deleter.stop()
if self._pipelines:
logger.warning("some pipelines were not garbage-collected")
@@ -435,3 +428,13 @@ class Scheduler:
def delete(self, rid):
self._deleter.delete(rid)
def request_termination(self, rid):
for pipeline in self._pipelines.values():
if rid in pipeline.pool.runs:
run = pipeline.pool.runs[rid]
if run.status == RunStatus.running or run.status == RunStatus.paused:
run.termination_requested = True
else:
self.delete(rid)
break
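From the experiment side, a termination request surfaces as TerminationRequested raised by scheduler.pause(), as wired up in the worker changes below. A hedged sketch; do_step and clean_up are hypothetical helpers:

from artiq import *

class LongRunning(EnvExperiment):
    def build(self):
        self.setattr_device("scheduler")

    def run(self):
        try:
            while True:
                self.do_step()          # hypothetical unit of work
                # raises TerminationRequested once termination is requested
                self.scheduler.pause()
        except TerminationRequested:
            self.clean_up()             # hypothetical cleanup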

View File

@@ -7,8 +7,7 @@ import time
from functools import partial
from artiq.protocols import pyon
from artiq.tools import (asyncio_process_wait_timeout, asyncio_process_wait,
asyncio_wait_or_cancel)
from artiq.tools import asyncio_wait_or_cancel
logger = logging.getLogger(__name__)
@@ -22,6 +21,10 @@ class WorkerWatchdogTimeout(Exception):
pass
class WorkerException(Exception):
pass
class WorkerError(Exception):
pass
@@ -57,27 +60,25 @@ class Worker:
else:
return None
@asyncio.coroutine
def _create_process(self):
yield from self.io_lock.acquire()
async def _create_process(self):
await self.io_lock.acquire()
try:
if self.closed.is_set():
raise WorkerError("Attempting to create process after close")
self.process = yield from asyncio.create_subprocess_exec(
self.process = await asyncio.create_subprocess_exec(
sys.executable, "-m", "artiq.master.worker_impl",
stdout=subprocess.PIPE, stdin=subprocess.PIPE)
finally:
self.io_lock.release()
@asyncio.coroutine
def close(self, term_timeout=1.0):
async def close(self, term_timeout=1.0):
"""Interrupts any I/O with the worker process and terminates the
worker process.
This method should always be called by the user to clean up, even if
build() or examine() raises an exception."""
self.closed.set()
yield from self.io_lock.acquire()
await self.io_lock.acquire()
try:
if self.process is None:
# Note the %s - self.rid can be None
@@ -92,27 +93,25 @@ class Worker:
return
obj = {"action": "terminate"}
try:
yield from self._send(obj, cancellable=False)
await self._send(obj, cancellable=False)
except:
logger.warning("failed to send terminate command to worker"
" (RID %s), killing", self.rid, exc_info=True)
self.process.kill()
yield from asyncio_process_wait(self.process)
await self.process.wait()
return
try:
yield from asyncio_process_wait_timeout(self.process,
term_timeout)
await asyncio.wait_for(self.process.wait(), term_timeout)
except asyncio.TimeoutError:
logger.warning("worker did not exit (RID %s), killing", self.rid)
self.process.kill()
yield from asyncio_process_wait(self.process)
await self.process.wait()
else:
logger.debug("worker exited gracefully (RID %s)", self.rid)
finally:
self.io_lock.release()
@asyncio.coroutine
def _send(self, obj, cancellable=True):
async def _send(self, obj, cancellable=True):
assert self.io_lock.locked()
line = pyon.encode(obj)
self.process.stdin.write(line.encode())
@@ -120,7 +119,7 @@ class Worker:
ifs = [self.process.stdin.drain()]
if cancellable:
ifs.append(self.closed.wait())
fs = yield from asyncio_wait_or_cancel(
fs = await asyncio_wait_or_cancel(
ifs, timeout=self.send_timeout,
return_when=asyncio.FIRST_COMPLETED)
if all(f.cancelled() for f in fs):
@@ -131,10 +130,9 @@ class Worker:
if cancellable and self.closed.is_set():
raise WorkerError("Data transmission to worker cancelled")
@asyncio.coroutine
def _recv(self, timeout):
async def _recv(self, timeout):
assert self.io_lock.locked()
fs = yield from asyncio_wait_or_cancel(
fs = await asyncio_wait_or_cancel(
[self.process.stdout.readline(), self.closed.wait()],
timeout=timeout, return_when=asyncio.FIRST_COMPLETED)
if all(f.cancelled() for f in fs):
@@ -150,13 +148,12 @@ class Worker:
raise WorkerError("Worker sent invalid PYON data")
return obj
@asyncio.coroutine
def _handle_worker_requests(self):
async def _handle_worker_requests(self):
while True:
try:
yield from self.io_lock.acquire()
await self.io_lock.acquire()
try:
obj = yield from self._recv(self.watchdog_time())
obj = await self._recv(self.watchdog_time())
finally:
self.io_lock.release()
except WorkerTimeout:
@@ -166,6 +163,8 @@ class Worker:
return True
elif action == "pause":
return False
elif action == "exception":
raise WorkerException
del obj["action"]
if action == "create_watchdog":
func = self.create_watchdog
@@ -183,24 +182,23 @@ class Worker:
except:
reply = {"status": "failed",
"message": traceback.format_exc()}
yield from self.io_lock.acquire()
await self.io_lock.acquire()
try:
yield from self._send(reply)
await self._send(reply)
finally:
self.io_lock.release()
@asyncio.coroutine
def _worker_action(self, obj, timeout=None):
async def _worker_action(self, obj, timeout=None):
if timeout is not None:
self.watchdogs[-1] = time.monotonic() + timeout
try:
yield from self.io_lock.acquire()
await self.io_lock.acquire()
try:
yield from self._send(obj)
await self._send(obj)
finally:
self.io_lock.release()
try:
completed = yield from self._handle_worker_requests()
completed = await self._handle_worker_requests()
except WorkerTimeout:
raise WorkerWatchdogTimeout
finally:
@@ -208,11 +206,10 @@ class Worker:
del self.watchdogs[-1]
return completed
@asyncio.coroutine
def build(self, rid, pipeline_name, wd, expid, priority, timeout=15.0):
async def build(self, rid, pipeline_name, wd, expid, priority, timeout=15.0):
self.rid = rid
yield from self._create_process()
yield from self._worker_action(
await self._create_process()
await self._worker_action(
{"action": "build",
"rid": rid,
"pipeline_name": pipeline_name,
@@ -221,45 +218,39 @@ class Worker:
"priority": priority},
timeout)
@asyncio.coroutine
def prepare(self):
yield from self._worker_action({"action": "prepare"})
async def prepare(self):
await self._worker_action({"action": "prepare"})
@asyncio.coroutine
def run(self):
completed = yield from self._worker_action({"action": "run"})
async def run(self):
completed = await self._worker_action({"action": "run"})
if not completed:
self.yield_time = time.monotonic()
return completed
@asyncio.coroutine
def resume(self):
async def resume(self, request_termination):
stop_duration = time.monotonic() - self.yield_time
for wid, expiry in self.watchdogs.items():
self.watchdogs[wid] += stop_duration
completed = yield from self._worker_action({"status": "ok",
"data": None})
completed = await self._worker_action({"status": "ok",
"data": request_termination})
if not completed:
self.yield_time = time.monotonic()
return completed
@asyncio.coroutine
def analyze(self):
yield from self._worker_action({"action": "analyze"})
async def analyze(self):
await self._worker_action({"action": "analyze"})
@asyncio.coroutine
def write_results(self, timeout=15.0):
yield from self._worker_action({"action": "write_results"},
timeout)
async def write_results(self, timeout=15.0):
await self._worker_action({"action": "write_results"},
timeout)
@asyncio.coroutine
def examine(self, file, timeout=20.0):
yield from self._create_process()
async def examine(self, file, timeout=20.0):
await self._create_process()
r = dict()
def register(class_name, name, arguments):
r[class_name] = {"name": name, "arguments": arguments}
self.register_experiment = register
yield from self._worker_action({"action": "examine",
"file": file}, timeout)
await self._worker_action({"action": "examine", "file": file},
timeout)
del self.register_experiment
return r

View File

@@ -15,6 +15,64 @@ from artiq.protocols.pc_rpc import Client, BestEffortClient
logger = logging.getLogger(__name__)
def _create_device(desc, device_mgr):
ty = desc["type"]
if ty == "local":
module = importlib.import_module(desc["module"])
device_class = getattr(module, desc["class"])
return device_class(device_mgr, **desc["arguments"])
elif ty == "controller":
if desc["best_effort"]:
cl = BestEffortClient
else:
cl = Client
return cl(desc["host"], desc["port"], desc["target_name"])
else:
raise ValueError("Unsupported type in device DB: " + ty)
class DeviceManager:
"""Handles creation and destruction of local device drivers and controller
RPC clients."""
def __init__(self, ddb, virtual_devices=dict()):
self.ddb = ddb
self.virtual_devices = virtual_devices
self.active_devices = OrderedDict()
def get_device_db(self):
"""Returns the full contents of the device database."""
return self.ddb.get_device_db()
def get(self, name):
"""Get the device driver or controller client corresponding to a
device database entry."""
if name in self.virtual_devices:
return self.virtual_devices[name]
if name in self.active_devices:
return self.active_devices[name]
else:
desc = self.ddb.get(name)
while isinstance(desc, str):
# alias
desc = self.ddb.get(desc)
dev = _create_device(desc, self)
self.active_devices[name] = dev
return dev
def close_devices(self):
"""Closes all active devices, in the opposite order as they were
requested."""
for dev in reversed(list(self.active_devices.values())):
try:
if isinstance(dev, (Client, BestEffortClient)):
dev.close_rpc()
elif hasattr(dev, "close"):
dev.close()
except Exception as e:
logger.warning("Exception %r when closing device %r", e, dev)
self.active_devices.clear()
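For reference, a hypothetical device_db.pyon covering the three entry kinds handled by _create_device() and get() above; the module and class names follow ARTIQ conventions, but all parameters are illustrative:

{
    "core": {
        "type": "local",
        "module": "artiq.coredevice.core",
        "class": "Core",
        "arguments": {"ref_period": 1e-9}
    },
    "lda": {
        "type": "controller",
        "best_effort": True,
        "host": "::1",
        "port": 3253,
        "target_name": "lda"
    },
    # a string entry is an alias, followed until a dict is reached
    "attenuator": "lda"
}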
def get_hdf5_output(start_time, rid, name):
dirname = os.path.join("results",
time.strftime("%Y-%m-%d", start_time),
@@ -87,80 +145,30 @@ def result_dict_to_hdf5(f, rd):
dataset[()] = data
class ResultDB:
def __init__(self):
self.rt = Notifier(dict())
self.nrt = dict()
self.store = set()
class DatasetManager:
def __init__(self, ddb):
self.broadcast = Notifier(dict())
self.local = dict()
self.ddb = ddb
self.broadcast.publish = ddb.update
def set(self, key, value, broadcast=False, persist=False, save=True):
if persist:
broadcast = True
r = None
if broadcast:
self.broadcast[key] = (persist, value)
r = self.broadcast[key][1]
if save:
self.local[key] = value
return r
def get(self, key):
try:
return self.nrt[key]
return self.local[key]
except KeyError:
return self.rt[key].read
def set_store(self, key, store):
if store:
self.store.add(key)
else:
self.store.discard(key)
return self.ddb.get(key)
def write_hdf5(self, f):
result_dict_to_hdf5(
f, {k: v for k, v in self.rt.read.items() if k in self.store})
result_dict_to_hdf5(
f, {k: v for k, v in self.nrt.items() if k in self.store})
def _create_device(desc, dmgr):
ty = desc["type"]
if ty == "local":
module = importlib.import_module(desc["module"])
device_class = getattr(module, desc["class"])
return device_class(dmgr, **desc["arguments"])
elif ty == "controller":
if desc["best_effort"]:
cl = BestEffortClient
else:
cl = Client
return cl(desc["host"], desc["port"], desc["target_name"])
else:
raise ValueError("Unsupported type in device DB: " + ty)
class DeviceManager:
"""Handles creation and destruction of local device drivers and controller
RPC clients."""
def __init__(self, ddb, virtual_devices=dict()):
self.ddb = ddb
self.virtual_devices = virtual_devices
self.active_devices = OrderedDict()
def get(self, name):
"""Get the device driver or controller client corresponding to a
device database entry."""
if name in self.virtual_devices:
return self.virtual_devices[name]
if name in self.active_devices:
return self.active_devices[name]
else:
desc = self.ddb.get(name)
while isinstance(desc, str):
# alias
desc = self.ddb.get(desc)
dev = _create_device(desc, self)
self.active_devices[name] = dev
return dev
def close_devices(self):
"""Closes all active devices, in the opposite order as they were
requested."""
for dev in reversed(list(self.active_devices.values())):
try:
if isinstance(dev, (Client, BestEffortClient)):
dev.close_rpc()
elif hasattr(dev, "close"):
dev.close()
except Exception as e:
logger.warning("Exception %r when closing device %r", e, dev)
self.active_devices.clear()
result_dict_to_hdf5(f, self.local)

View File

@@ -1,12 +1,13 @@
import sys
import time
import os
import traceback
from artiq.protocols import pyon
from artiq.tools import file_import
from artiq.master.worker_db import DeviceManager, ResultDB, get_hdf5_output
from artiq.master.worker_db import DeviceManager, DatasetManager, get_hdf5_output
from artiq.language.environment import is_experiment
from artiq.language.core import set_watchdog_factory
from artiq.language.core import set_watchdog_factory, TerminationRequested
def get_object():
@@ -62,16 +63,14 @@ class LogForwarder:
pass
class ParentDDB:
get = make_parent_action("get_device", "name", KeyError)
class ParentDeviceDB:
get_device_db = make_parent_action("get_device_db", "")
get = make_parent_action("get_device", "key", KeyError)
class ParentPDB:
get = make_parent_action("get_parameter", "name", KeyError)
set = make_parent_action("set_parameter", "name value")
update_rt_results = make_parent_action("update_rt_results", "mod")
class ParentDatasetDB:
get = make_parent_action("get_dataset", "key", KeyError)
update = make_parent_action("update_dataset", "mod")
class Watchdog:
@@ -92,7 +91,11 @@ set_watchdog_factory(Watchdog)
class Scheduler:
pause = staticmethod(make_parent_action("pause", ""))
pause_noexc = staticmethod(make_parent_action("pause", ""))
def pause(self):
if self.pause_noexc():
raise TerminationRequested
submit = staticmethod(make_parent_action("scheduler_submit",
"pipeline_name expid priority due_date flush"))
@@ -121,20 +124,22 @@ register_experiment = make_parent_action("register_experiment",
"class_name name arguments")
class DummyDMGR:
class ExamineDeviceMgr:
get_device_db = make_parent_action("get_device_db", "")
def get(self, name):
return None
class DummyPDB:
def get(self, name):
class DummyDatasetMgr:
def set(self, key, value, broadcast=False, persist=False, save=True):
return None
def set(self, name, value):
def get(self, key):
pass
def examine(dmgr, pdb, rdb, file):
def examine(device_mgr, dataset_mgr, file):
module = file_import(file)
for class_name, exp_class in module.__dict__.items():
if class_name[0] == "_":
@@ -146,7 +151,7 @@ def examine(dmgr, pdb, rdb, file):
name = exp_class.__doc__.splitlines()[0].strip()
if name[-1] == ".":
name = name[:-1]
exp_inst = exp_class(dmgr, pdb, rdb, default_arg_none=True)
exp_inst = exp_class(device_mgr, dataset_mgr, default_arg_none=True)
arguments = [(k, (proc.describe(), group))
for k, (proc, group) in exp_inst.requested_args.items()]
register_experiment(class_name, name, arguments)
@@ -161,10 +166,9 @@ def main():
exp = None
exp_inst = None
dmgr = DeviceManager(ParentDDB,
virtual_devices={"scheduler": Scheduler()})
rdb = ResultDB()
rdb.rt.publish = update_rt_results
device_mgr = DeviceManager(ParentDeviceDB,
virtual_devices={"scheduler": Scheduler()})
dataset_mgr = DatasetManager(ParentDatasetDB)
try:
while True:
@@ -180,9 +184,9 @@ def main():
else:
expf = expid["file"]
exp = get_exp(expf, expid["class_name"])
dmgr.virtual_devices["scheduler"].set_run_info(
device_mgr.virtual_devices["scheduler"].set_run_info(
obj["pipeline_name"], expid, obj["priority"])
exp_inst = exp(dmgr, ParentPDB, rdb,
exp_inst = exp(device_mgr, dataset_mgr,
**expid["arguments"])
put_object({"action": "completed"})
elif action == "prepare":
@@ -197,7 +201,7 @@ def main():
elif action == "write_results":
f = get_hdf5_output(start_time, rid, exp.__name__)
try:
rdb.write_hdf5(f)
dataset_mgr.write_hdf5(f)
if "repo_rev" in expid:
rr = expid["repo_rev"]
dtype = "S{}".format(len(rr))
@@ -207,12 +211,15 @@ def main():
f.close()
put_object({"action": "completed"})
elif action == "examine":
examine(DummyDMGR(), DummyPDB(), ResultDB(), obj["file"])
examine(ExamineDeviceMgr(), DummyDatasetMgr(), obj["file"])
put_object({"action": "completed"})
elif action == "terminate":
break
except:
traceback.print_exc()
put_object({"action": "exception"})
finally:
dmgr.close_devices()
device_mgr.close_devices()
if __name__ == "__main__":
main()

View File

@@ -12,8 +12,7 @@ class AsyncioServer:
def __init__(self):
self._client_tasks = set()
@asyncio.coroutine
def start(self, host, port):
async def start(self, host, port):
"""Starts the server.
The user must call ``stop`` to free resources properly after this
@@ -26,11 +25,10 @@ class AsyncioServer:
:param port: TCP port to bind to.
"""
self.server = yield from asyncio.start_server(self._handle_connection,
host, port)
self.server = await asyncio.start_server(self._handle_connection,
host, port)
@asyncio.coroutine
def stop(self):
async def stop(self):
"""Stops the server.
"""
@@ -39,11 +37,11 @@ class AsyncioServer:
task.cancel()
for task in wait_for:
try:
yield from asyncio.wait_for(task, None)
await asyncio.wait_for(task, None)
except asyncio.CancelledError:
pass
self.server.close()
yield from self.server.wait_closed()
await self.server.wait_closed()
del self.server
def _client_done(self, task):

View File

@@ -1,31 +0,0 @@
from time import time
from artiq.protocols import pyon
from artiq.protocols.sync_struct import Notifier
class FlatFileDB:
def __init__(self, filename):
self.filename = filename
self.data = Notifier(pyon.load_file(self.filename))
self.hooks = []
def save(self):
pyon.store_file(self.filename, self.data.read)
def get(self, name):
return self.data.read[name]
def set(self, name, value):
self.data[name] = value
self.save()
timestamp = time()
for hook in self.hooks:
hook.set(timestamp, name, value)
def delete(self, name):
del self.data[name]
self.save()
timestamp = time()
for hook in self.hooks:
hook.delete(timestamp, name)

View File

@@ -159,16 +159,15 @@ class AsyncioClient:
self.__target_names = None
self.__description = None
@asyncio.coroutine
def connect_rpc(self, host, port, target_name):
async def connect_rpc(self, host, port, target_name):
"""Connects to the server. This cannot be done in __init__ because
this method is a coroutine. See ``Client`` for a description of the
parameters."""
self.__reader, self.__writer = \
yield from asyncio.open_connection(host, port)
await asyncio.open_connection(host, port)
try:
self.__writer.write(_init_string)
server_identification = yield from self.__recv()
server_identification = await self.__recv()
self.__target_names = server_identification["targets"]
self.__description = server_identification["description"]
if target_name is not None:
@@ -205,20 +204,18 @@ class AsyncioClient:
line = pyon.encode(obj) + "\n"
self.__writer.write(line.encode())
@asyncio.coroutine
def __recv(self):
line = yield from self.__reader.readline()
async def __recv(self):
line = await self.__reader.readline()
return pyon.decode(line.decode())
@asyncio.coroutine
def __do_rpc(self, name, args, kwargs):
yield from self.__lock.acquire()
async def __do_rpc(self, name, args, kwargs):
await self.__lock.acquire()
try:
obj = {"action": "call", "name": name,
"args": args, "kwargs": kwargs}
self.__send(obj)
obj = yield from self.__recv()
obj = await self.__recv()
if obj["status"] == "ok":
return obj["ret"]
elif obj["status"] == "failed":
@@ -229,9 +226,8 @@ class AsyncioClient:
self.__lock.release()
def __getattr__(self, name):
@asyncio.coroutine
def proxy(*args, **kwargs):
res = yield from self.__do_rpc(name, args, kwargs)
async def proxy(*args, **kwargs):
res = await self.__do_rpc(name, args, kwargs)
return res
return proxy
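A usage sketch for AsyncioClient after the conversion; the host, port, target name and the get_attenuation method are illustrative:

import asyncio
from artiq.protocols.pc_rpc import AsyncioClient

async def main():
    remote = AsyncioClient()
    await remote.connect_rpc("::1", 3251, "lda")
    try:
        # attribute access returns an async proxy; awaiting it runs the RPC
        print(await remote.get_attenuation())
    finally:
        remote.close_rpc()

asyncio.get_event_loop().run_until_complete(main())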
@@ -413,10 +409,9 @@ class Server(_AsyncioServer):
if builtin_terminate:
self._terminate_request = asyncio.Event()
@asyncio.coroutine
def _handle_connection_cr(self, reader, writer):
async def _handle_connection_cr(self, reader, writer):
try:
line = yield from reader.readline()
line = await reader.readline()
if line != _init_string:
return
@@ -426,7 +421,7 @@ class Server(_AsyncioServer):
}
line = pyon.encode(obj) + "\n"
writer.write(line.encode())
line = yield from reader.readline()
line = await reader.readline()
if not line:
return
target_name = line.decode()[:-1]
@@ -436,7 +431,7 @@ class Server(_AsyncioServer):
return
while True:
line = yield from reader.readline()
line = await reader.readline()
if not line:
break
obj = pyon.decode(line.decode())
@@ -486,9 +481,8 @@ class Server(_AsyncioServer):
finally:
writer.close()
@asyncio.coroutine
def wait_terminate(self):
yield from self._terminate_request.wait()
async def wait_terminate(self):
await self._terminate_request.wait()
def simple_server_loop(targets, host, port, description=None):

View File

@@ -187,3 +187,23 @@ def load_file(filename):
"""Parses the specified file and returns the decoded Python object."""
with open(filename, "r") as f:
return decode(f.read())
class FlatFileDB:
def __init__(self, filename):
self.filename = filename
self.data = load_file(self.filename)
def save(self):
store_file(self.filename, self.data)
def get(self, key):
return self.data[key]
def set(self, key, value):
self.data[key] = value
self.save()
def delete(self, key):
del self.data[key]
self.save()
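A usage sketch for the relocated FlatFileDB, which writes through to disk on every set/delete; the file name is hypothetical and must already contain a PYON dictionary:

from artiq.protocols.pyon import FlatFileDB

db = FlatFileDB("settings.pyon")
db.set("gain", 1.5)          # updates the dict and rewrites settings.pyon
assert db.get("gain") == 1.5
db.delete("gain")            # also saves immediately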

View File

@@ -61,10 +61,9 @@ class Subscriber:
self.target_builders = [target_builder]
self.notify_cb = notify_cb
@asyncio.coroutine
def connect(self, host, port, before_receive_cb=None):
async def connect(self, host, port, before_receive_cb=None):
self.reader, self.writer = \
yield from asyncio.open_connection(host, port)
await asyncio.open_connection(host, port)
try:
if before_receive_cb is not None:
before_receive_cb()
@@ -77,12 +76,11 @@ class Subscriber:
del self.writer
raise
@asyncio.coroutine
def close(self):
async def close(self):
try:
self.receive_task.cancel()
try:
yield from asyncio.wait_for(self.receive_task, None)
await asyncio.wait_for(self.receive_task, None)
except asyncio.CancelledError:
pass
finally:
@@ -90,11 +88,10 @@ class Subscriber:
del self.reader
del self.writer
@asyncio.coroutine
def _receive_cr(self):
async def _receive_cr(self):
targets = []
while True:
line = yield from self.reader.readline()
line = await self.reader.readline()
if not line:
return
mod = pyon.decode(line.decode())
@@ -209,14 +206,13 @@ class Publisher(AsyncioServer):
for notifier in notifiers.values():
notifier.publish = partial(self.publish, notifier)
@asyncio.coroutine
def _handle_connection_cr(self, reader, writer):
async def _handle_connection_cr(self, reader, writer):
try:
line = yield from reader.readline()
line = await reader.readline()
if line != _init_string:
return
line = yield from reader.readline()
line = await reader.readline()
if not line:
return
notifier_name = line.decode()[:-1]
@@ -234,10 +230,10 @@ class Publisher(AsyncioServer):
self._recipients[notifier_name].add(queue)
try:
while True:
line = yield from queue.get()
line = await queue.get()
writer.write(line)
# raise exception on connection error
yield from writer.drain()
await writer.drain()
finally:
self._recipients[notifier_name].remove(queue)
except ConnectionResetError:
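An end-to-end sketch of Publisher/Subscriber over a Notifier in the new async style; the address and port are illustrative:

import asyncio
from artiq.protocols.sync_struct import Notifier, Publisher, Subscriber

async def demo():
    source = Notifier({"count": 0})
    publisher = Publisher({"demo": source})
    await publisher.start("::1", 7777)

    target = dict()
    def init(struct):
        # receives the initial state; returns the object mods apply to
        target.update(struct)
        return target

    subscriber = Subscriber("demo", init, lambda mod: None)
    await subscriber.connect("::1", 7777)
    source["count"] = 1          # published to all subscribers as a mod
    await asyncio.sleep(0.1)     # allow the mod to propagate
    assert target == {"count": 1}
    await subscriber.close()
    await publisher.stop()

asyncio.get_event_loop().run_until_complete(demo())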

View File

@@ -356,8 +356,7 @@ class Function:
exception_id = self.mappers.exception.encode(exception_class)
return ast.copy_location(
ast.Call(func=ast.Name("EncodedException", ast.Load()),
args=[value_to_ast(exception_id)],
keywords=[], starargs=None, kwargs=None),
args=[value_to_ast(exception_id)], keywords=[]),
e)
def code_visit_Raise(self, node):
@@ -514,8 +513,7 @@ def get_attr_writeback(attribute_namespace, rpc_mapper, loc_node):
arg3 = ast.copy_location(
ast.Name(attr_info.mangled_name, ast.Load()), loc_node)
call = ast.copy_location(
ast.Call(func=func, args=[arg1, arg2, arg3],
keywords=[], starargs=None, kwargs=None),
ast.Call(func=func, args=[arg1, arg2, arg3], keywords=[]),
loc_node)
expr = ast.copy_location(ast.Expr(call), loc_node)
attr_writeback.append(expr)

View File

@@ -71,8 +71,7 @@ def _interleave_timelines(timelines):
delay_stmt = ast.copy_location(
ast.Expr(ast.Call(
func=ast.Name("delay_mu", ast.Load()),
args=[value_to_ast(dt)],
keywords=[], starargs=[], kwargs=[])),
args=[value_to_ast(dt)], keywords=[])),
ref_stmt)
r.append(delay_stmt)
else:

View File

@@ -16,9 +16,9 @@ def _run_on_host(k_class, **arguments):
class _Primes(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_argument("output_list")
self.attr_argument("maximum")
self.setattr_device("core")
self.setattr_argument("output_list")
self.setattr_argument("maximum")
@kernel
def run(self):
@@ -36,7 +36,7 @@ class _Primes(EnvExperiment):
class _Misc(EnvExperiment):
def build(self):
self.attr_device("core")
self.setattr_device("core")
self.input = 84
self.al = [1, 2, 3, 4, 5]
@@ -54,9 +54,9 @@ class _Misc(EnvExperiment):
class _PulseLogger(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_argument("output_list")
self.attr_argument("name")
self.setattr_device("core")
self.setattr_argument("output_list")
self.setattr_argument("name")
def _append(self, t, l, f):
if not hasattr(self, "first_timestamp"):
@@ -81,11 +81,11 @@ class _PulseLogger(EnvExperiment):
class _Pulses(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_argument("output_list")
self.setattr_device("core")
self.setattr_argument("output_list")
for name in "a", "b", "c", "d":
pl = _PulseLogger(*self.dbs(),
pl = _PulseLogger(*self.managers(),
output_list=self.output_list,
name=name)
setattr(self, name, pl)
@@ -108,8 +108,8 @@ class _MyException(Exception):
class _Exceptions(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_argument("trace")
self.setattr_device("core")
self.setattr_argument("trace")
@kernel
def run(self):
@@ -152,8 +152,8 @@ class _Exceptions(EnvExperiment):
class _RPCExceptions(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_argument("catch", FreeValue(False))
self.setattr_device("core")
self.setattr_argument("catch", FreeValue(False))
self.success = False

View File

@@ -10,8 +10,11 @@ from artiq.coredevice.exceptions import RTIOUnderflow, RTIOSequenceError
class RTT(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_device("ttl_inout")
self.setattr_device("core")
self.setattr_device("ttl_inout")
def set_rtt(self, rtt):
self.set_dataset("rtt", rtt)
@kernel
def run(self):
@@ -30,9 +33,12 @@ class RTT(EnvExperiment):
class Loopback(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_device("loop_in")
self.attr_device("loop_out")
self.setattr_device("core")
self.setattr_device("loop_in")
self.setattr_device("loop_out")
def set_rtt(self, rtt):
self.set_dataset("rtt", rtt)
@kernel
def run(self):
@@ -49,9 +55,12 @@ class Loopback(EnvExperiment):
class ClockGeneratorLoopback(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_device("loop_clock_in")
self.attr_device("loop_clock_out")
self.setattr_device("core")
self.setattr_device("loop_clock_in")
self.setattr_device("loop_clock_out")
def set_count(self, count):
self.set_dataset("count", count)
@kernel
def run(self):
@@ -68,8 +77,11 @@ class ClockGeneratorLoopback(EnvExperiment):
class PulseRate(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_device("ttl_out")
self.setattr_device("core")
self.setattr_device("ttl_out")
def set_pulse_rate(self, pulse_rate):
self.set_dataset("pulse_rate", pulse_rate)
@kernel
def run(self):
@@ -89,7 +101,7 @@ class PulseRate(EnvExperiment):
class Watchdog(EnvExperiment):
def build(self):
self.attr_device("core")
self.setattr_device("core")
@kernel
def run(self):
@@ -100,9 +112,12 @@ class Watchdog(EnvExperiment):
class LoopbackCount(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_device("ttl_inout")
self.attr_argument("npulses")
self.setattr_device("core")
self.setattr_device("ttl_inout")
self.setattr_argument("npulses")
def set_count(self, count):
self.set_dataset("count", count)
@kernel
def run(self):
@@ -119,8 +134,8 @@ class LoopbackCount(EnvExperiment):
class Underflow(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_device("ttl_out")
self.setattr_device("core")
self.setattr_device("ttl_out")
@kernel
def run(self):
@@ -131,8 +146,8 @@ class Underflow(EnvExperiment):
class SequenceError(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_device("ttl_out")
self.setattr_device("core")
self.setattr_device("ttl_out")
@kernel
def run(self):
@ -144,8 +159,8 @@ class SequenceError(EnvExperiment):
class CollisionError(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_device("ttl_out_serdes")
self.setattr_device("core")
self.setattr_device("ttl_out_serdes")
@kernel
def run(self):
@ -157,7 +172,10 @@ class CollisionError(EnvExperiment):
class TimeKeepsRunning(EnvExperiment):
def build(self):
self.attr_device("core")
self.setattr_device("core")
def set_time_at_start(self, time_at_start):
self.set_dataset("time_at_start", time_at_start)
@kernel
def run(self):
@@ -166,40 +184,42 @@ class TimeKeepsRunning(EnvExperiment):
class Handover(EnvExperiment):
def build(self):
self.attr_device("core")
self.setattr_device("core")
@kernel
def get_now(self, var):
self.set_result(var, now_mu())
def get_now(self):
self.time_at_start = now_mu()
def run(self):
self.get_now("t1")
self.get_now("t2")
self.get_now()
self.set_dataset("t1", self.time_at_start)
self.get_now()
self.set_dataset("t2", self.time_at_start)
class CoredeviceTest(ExperimentCase):
def test_rtt(self):
self.execute(RTT)
rtt = self.rdb.get("rtt")
rtt = self.dataset_mgr.get("rtt")
print(rtt)
self.assertGreater(rtt, 0*ns)
self.assertLess(rtt, 100*ns)
def test_loopback(self):
self.execute(Loopback)
rtt = self.rdb.get("rtt")
rtt = self.dataset_mgr.get("rtt")
print(rtt)
self.assertGreater(rtt, 0*ns)
self.assertLess(rtt, 50*ns)
def test_clock_generator_loopback(self):
self.execute(ClockGeneratorLoopback)
count = self.rdb.get("count")
count = self.dataset_mgr.get("count")
self.assertEqual(count, 10)
def test_pulse_rate(self):
self.execute(PulseRate)
rate = self.rdb.get("pulse_rate")
rate = self.dataset_mgr.get("pulse_rate")
print(rate)
self.assertGreater(rate, 100*ns)
self.assertLess(rate, 2500*ns)
@@ -207,7 +227,7 @@ class CoredeviceTest(ExperimentCase):
def test_loopback_count(self):
npulses = 2
self.execute(LoopbackCount, npulses=npulses)
count = self.rdb.get("count")
count = self.dataset_mgr.get("count")
self.assertEqual(count, npulses)
def test_underflow(self):
@@ -229,23 +249,24 @@ class CoredeviceTest(ExperimentCase):
def test_time_keeps_running(self):
self.execute(TimeKeepsRunning)
t1 = self.rdb.get("time_at_start")
t1 = self.dataset_mgr.get("time_at_start")
self.execute(TimeKeepsRunning)
t2 = self.rdb.get("time_at_start")
dead_time = mu_to_seconds(t2 - t1, self.dmgr.get("core"))
t2 = self.dataset_mgr.get("time_at_start")
dead_time = mu_to_seconds(t2 - t1, self.device_mgr.get("core"))
print(dead_time)
self.assertGreater(dead_time, 1*ms)
self.assertLess(dead_time, 500*ms)
def test_handover(self):
self.execute(Handover)
self.assertEqual(self.rdb.get("t1"), self.rdb.get("t2"))
self.assertEqual(self.dataset_mgr.get("t1"),
self.dataset_mgr.get("t2"))
class RPCTiming(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_argument("repeats", FreeValue(100))
self.setattr_device("core")
self.setattr_argument("repeats", FreeValue(100))
def nop(self):
pass
@@ -262,14 +283,14 @@ class RPCTiming(EnvExperiment):
self.ts = [0. for _ in range(self.repeats)]
self.bench()
mean = sum(self.ts)/self.repeats
self.set_result("rpc_time_stddev", sqrt(
self.set_dataset("rpc_time_stddev", sqrt(
sum([(t - mean)**2 for t in self.ts])/self.repeats))
self.set_result("rpc_time_mean", mean)
self.set_dataset("rpc_time_mean", mean)
class RPCTest(ExperimentCase):
def test_rpc_timing(self):
self.execute(RPCTiming)
self.assertGreater(self.rdb.get("rpc_time_mean"), 100*ns)
self.assertLess(self.rdb.get("rpc_time_mean"), 15*ms)
self.assertLess(self.rdb.get("rpc_time_stddev"), 1*ms)
self.assertGreater(self.dataset_mgr.get("rpc_time_mean"), 100*ns)
self.assertLess(self.dataset_mgr.get("rpc_time_mean"), 15*ms)
self.assertLess(self.dataset_mgr.get("rpc_time_stddev"), 1*ms)

View File

@@ -1,3 +1,4 @@
# Copyright (C) 2015 M-Labs Limited
# Copyright (C) 2014, 2015 Robert Jordens <jordens@gmail.com>
import os
@@ -6,9 +7,10 @@ import unittest
import logging
from artiq.language import *
from artiq.master.databases import DeviceDB, DatasetDB
from artiq.master.worker_db import DeviceManager, DatasetManager
from artiq.coredevice.core import CompileError
from artiq.protocols.file_db import FlatFileDB
from artiq.master.worker_db import DeviceManager, ResultDB
from artiq.protocols import pyon
from artiq.frontend.artiq_run import DummyScheduler
@@ -19,14 +21,14 @@ logger = logging.getLogger(__name__)
def get_from_ddb(*path, default="skip"):
if not artiq_root:
raise unittest.SkipTest("no ARTIQ_ROOT")
v = FlatFileDB(os.path.join(artiq_root, "ddb.pyon")).data
v = pyon.load_file(os.path.join(artiq_root, "device_db.pyon"))
try:
for p in path:
v = v[p]
return v.read
return v
except KeyError:
if default == "skip":
raise unittest.SkipTest("ddb path {} not found".format(path))
raise unittest.SkipTest("device db path {} not found".format(path))
else:
return default
@@ -34,15 +36,15 @@ def get_from_ddb(*path, default="skip"):
@unittest.skipUnless(artiq_root, "no ARTIQ_ROOT")
class ExperimentCase(unittest.TestCase):
def setUp(self):
self.ddb = FlatFileDB(os.path.join(artiq_root, "ddb.pyon"))
self.dmgr = DeviceManager(self.ddb,
self.device_db = DeviceDB(os.path.join(artiq_root, "device_db.pyon"))
self.dataset_db = DatasetDB(os.path.join(artiq_root, "dataset_db.pyon"))
self.device_mgr = DeviceManager(self.device_db,
virtual_devices={"scheduler": DummyScheduler()})
self.pdb = FlatFileDB(os.path.join(artiq_root, "pdb.pyon"))
self.rdb = ResultDB()
self.dataset_mgr = DatasetManager(self.dataset_db)
def create(self, cls, **kwargs):
try:
exp = cls(self.dmgr, self.pdb, self.rdb, **kwargs)
exp = cls(self.device_mgr, self.dataset_mgr, **kwargs)
exp.prepare()
return exp
except KeyError as e:
@@ -55,7 +57,7 @@ class ExperimentCase(unittest.TestCase):
"class_name": cls.__name__,
"arguments": kwargs
}
self.dmgr.virtual_devices["scheduler"].expid = expid
self.device_mgr.virtual_devices["scheduler"].expid = expid
try:
exp = self.create(cls, **kwargs)
exp.run()
@@ -65,4 +67,4 @@ class ExperimentCase(unittest.TestCase):
# Reduce amount of text on terminal.
raise error from None
finally:
self.dmgr.close_devices()
self.device_mgr.close_devices()

View File

@@ -52,23 +52,22 @@ class RPCCase(unittest.TestCase):
def test_blocking_echo(self):
self._run_server_and_test(self._blocking_echo)
@asyncio.coroutine
def _asyncio_echo(self):
async def _asyncio_echo(self):
remote = pc_rpc.AsyncioClient()
for attempt in range(100):
yield from asyncio.sleep(.2)
await asyncio.sleep(.2)
try:
yield from remote.connect_rpc(test_address, test_port, "test")
await remote.connect_rpc(test_address, test_port, "test")
except ConnectionRefusedError:
pass
else:
break
try:
test_object_back = yield from remote.echo(test_object)
test_object_back = await remote.echo(test_object)
self.assertEqual(test_object, test_object_back)
with self.assertRaises(pc_rpc.RemoteError):
yield from remote.non_existing_method()
yield from remote.terminate()
await remote.non_existing_method()
await remote.terminate()
finally:
remote.close_rpc()

View File

@@ -18,12 +18,16 @@ class EmptyExperiment(EnvExperiment):
class BackgroundExperiment(EnvExperiment):
def build(self):
self.attr_device("scheduler")
self.setattr_device("scheduler")
def run(self):
while True:
self.scheduler.pause()
sleep(0.2)
try:
while True:
self.scheduler.pause()
sleep(0.2)
except TerminationRequested:
self.set_dataset("termination_ok", True,
broadcast=True, save=False)
def _get_expid(name):
@@ -57,11 +61,6 @@ def _get_basic_steps(rid, expid, priority=0, flush=False):
]
_handlers = {
"init_rt_results": lambda description: None
}
class SchedulerCase(unittest.TestCase):
def setUp(self):
if os.name == "nt":
@@ -72,7 +71,7 @@ class SchedulerCase(unittest.TestCase):
def test_steps(self):
loop = self.loop
scheduler = Scheduler(0, _handlers, None)
scheduler = Scheduler(0, dict(), None)
expid = _get_expid("EmptyExperiment")
expect = _get_basic_steps(1, expid)
@@ -108,13 +107,27 @@ class SchedulerCase(unittest.TestCase):
def test_pause(self):
loop = self.loop
scheduler = Scheduler(0, _handlers, None)
termination_ok = False
def check_termination(mod):
nonlocal termination_ok
self.assertEqual(
mod,
{"action": "setitem", "key": "termination_ok",
"value": (False, True), "path": []})
termination_ok = True
handlers = {
"update_dataset": check_termination
}
scheduler = Scheduler(0, handlers, None)
expid_bg = _get_expid("BackgroundExperiment")
expid = _get_expid("EmptyExperiment")
expect = _get_basic_steps(1, expid)
background_running = asyncio.Event()
done = asyncio.Event()
empty_completed = asyncio.Event()
background_completed = asyncio.Event()
expect_idx = 0
def notify(mod):
nonlocal expect_idx
@@ -123,23 +136,34 @@ class SchedulerCase(unittest.TestCase):
"key": "status",
"action": "setitem"}:
background_running.set()
if mod == {"path": [0],
"value": "deleting",
"key": "status",
"action": "setitem"}:
background_completed.set()
if mod["path"] == [1] or (mod["path"] == [] and mod["key"] == 1):
self.assertEqual(mod, expect[expect_idx])
expect_idx += 1
if expect_idx >= len(expect):
done.set()
empty_completed.set()
scheduler.notifier.publish = notify
scheduler.start()
scheduler.submit("main", expid_bg, -99, None, False)
loop.run_until_complete(background_running.wait())
scheduler.submit("main", expid, 0, None, False)
loop.run_until_complete(done.wait())
loop.run_until_complete(empty_completed.wait())
self.assertFalse(termination_ok)
scheduler.request_termination(0)
loop.run_until_complete(background_completed.wait())
self.assertTrue(termination_ok)
loop.run_until_complete(scheduler.stop())
def test_flush(self):
loop = self.loop
scheduler = Scheduler(0, _handlers, None)
scheduler = Scheduler(0, dict(), None)
expid = _get_expid("EmptyExperiment")
expect = _get_basic_steps(1, expid, 1, True)

View File

@@ -8,7 +8,6 @@ test_address = "::1"
test_port = 7777
@asyncio.coroutine
def write_test_data(test_dict):
test_values = [5, 2.1, None, True, False,
{"a": 5, 2: np.linspace(0, 10, 1)},
@@ -30,19 +29,9 @@ def write_test_data(test_dict):
test_dict["finished"] = True
@asyncio.coroutine
def start_server(publisher_future, test_dict_future):
test_dict = sync_struct.Notifier(dict())
publisher = sync_struct.Publisher(
{"test": test_dict})
yield from publisher.start(test_address, test_port)
publisher_future.set_result(publisher)
test_dict_future.set_result(test_dict)
class SyncStructCase(unittest.TestCase):
def init_test_dict(self, init):
self.test_dict = init
self.received_dict = init
return init
def notify(self, mod):
@@ -54,29 +43,27 @@ class SyncStructCase(unittest.TestCase):
self.loop = asyncio.new_event_loop()
asyncio.set_event_loop(self.loop)
def test_recv(self):
loop = self.loop
async def _do_test_recv(self):
self.receiving_done = asyncio.Event()
publisher = asyncio.Future()
test_dict = asyncio.Future()
asyncio.async(start_server(publisher, test_dict))
loop.run_until_complete(publisher)
loop.run_until_complete(test_dict)
self.publisher = publisher.result()
test_dict = test_dict.result()
test_vector = dict()
loop.run_until_complete(write_test_data(test_vector))
test_dict = sync_struct.Notifier(dict())
publisher = sync_struct.Publisher({"test": test_dict})
await publisher.start(test_address, test_port)
asyncio.async(write_test_data(test_dict))
self.subscriber = sync_struct.Subscriber("test", self.init_test_dict,
self.notify)
loop.run_until_complete(self.subscriber.connect(test_address,
test_port))
loop.run_until_complete(self.receiving_done.wait())
self.assertEqual(self.test_dict, test_vector)
self.loop.run_until_complete(self.subscriber.close())
self.loop.run_until_complete(self.publisher.stop())
subscriber = sync_struct.Subscriber("test", self.init_test_dict,
self.notify)
await subscriber.connect(test_address, test_port)
write_test_data(test_dict)
await self.receiving_done.wait()
await subscriber.close()
await publisher.stop()
self.assertEqual(self.received_dict, test_dict.read)
def test_recv(self):
self.loop.run_until_complete(self._do_test_recv())
def tearDown(self):
self.loop.close()

View File

@@ -8,6 +8,22 @@ from artiq import *
from artiq.master.worker import *
class SimpleExperiment(EnvExperiment):
def build(self):
pass
def run(self):
pass
class ExceptionTermination(EnvExperiment):
def build(self):
pass
def run(self):
raise TypeError
class WatchdogNoTimeout(EnvExperiment):
def build(self):
pass
@@ -36,15 +52,14 @@ class WatchdogTimeoutInBuild(EnvExperiment):
pass
@asyncio.coroutine
def _call_worker(worker, expid):
async def _call_worker(worker, expid):
try:
yield from worker.build(0, "main", None, expid, 0)
yield from worker.prepare()
yield from worker.run()
yield from worker.analyze()
await worker.build(0, "main", None, expid, 0)
await worker.prepare()
await worker.run()
await worker.analyze()
finally:
yield from worker.close()
await worker.close()
def _run_experiment(class_name):
@ -54,11 +69,11 @@ def _run_experiment(class_name):
"arguments": dict()
}
loop = asyncio.get_event_loop()
worker = Worker()
worker = Worker(handlers={"log": lambda message: None})
loop.run_until_complete(_call_worker(worker, expid))
class WatchdogCase(unittest.TestCase):
class WorkerCase(unittest.TestCase):
def setUp(self):
if os.name == "nt":
self.loop = asyncio.ProactorEventLoop()
@ -66,6 +81,13 @@ class WatchdogCase(unittest.TestCase):
self.loop = asyncio.new_event_loop()
asyncio.set_event_loop(self.loop)
def test_simple_run(self):
_run_experiment("SimpleExperiment")
def test_exception(self):
with self.assertRaises(WorkerException):
_run_experiment("ExceptionTermination")
def test_watchdog_no_timeout(self):
_run_experiment("WatchdogNoTimeout")


@ -8,6 +8,8 @@ import time
import collections
import os.path
import numpy as np
from artiq.language.environment import is_experiment
from artiq.protocols import pyon
@ -23,6 +25,41 @@ def parse_arguments(arguments):
return d
def elide(s, maxlen):
elided = False
if len(s) > maxlen:
s = s[:maxlen]
elided = True
try:
idx = s.index("\n")
except ValueError:
pass
else:
s = s[:idx]
elided = True
if elided:
maxlen -= 3
if len(s) > maxlen:
s = s[:maxlen]
s += "..."
return s
def short_format(v):
if v is None:
return "None"
t = type(v)
if np.issubdtype(t, int) or np.issubdtype(t, float):
return str(v)
elif t is str:
return "\"" + elide(v, 15) + "\""
else:
r = t.__name__
if t is list or t is dict or t is set:
r += " ({})".format(len(v))
return r
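A quick illustration of how these helpers behave (derived from the definitions above; the assertions are editorial, not part of the diff):

# elide() truncates at maxlen or at the first newline and appends "..."
assert elide("short", 10) == "short"
assert elide("abcdefghij", 8) == "abcde..."
# short_format() renders a compact one-line summary of a value
assert short_format(None) == "None"
assert short_format("hello") == '"hello"'
assert short_format([1, 2, 3]) == "list (3)"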
def file_import(filename, prefix="file_import_"):
linecache.checkcache(filename)
@ -79,68 +116,41 @@ def init_logger(args):
logging.basicConfig(level=logging.WARNING + args.quiet*10 - args.verbose*10)
@asyncio.coroutine
def exc_to_warning(coro):
async def exc_to_warning(coro):
try:
yield from coro
await coro
except:
logger.warning("asyncio coroutine terminated with exception",
exc_info=True)
@asyncio.coroutine
def asyncio_process_wait_timeout(process, timeout):
# In Python < 3.5, asyncio.wait_for(process.wait(), ...
# causes a futures.InvalidStateError inside asyncio if and when the
# process terminates after the timeout.
# Work around this problem.
@asyncio.coroutine
def process_wait_returncode_timeout():
while True:
if process.returncode is not None:
break
yield from asyncio.sleep(0.1)
yield from asyncio.wait_for(process_wait_returncode_timeout(),
timeout=timeout)
@asyncio.coroutine
def asyncio_process_wait(process):
r = True
while r:
f, p = yield from asyncio.wait([process.stdout.read(1024)])
r = f.pop().result()
@asyncio.coroutine
def asyncio_wait_or_cancel(fs, **kwargs):
fs = [asyncio.async(f) for f in fs]
async def asyncio_wait_or_cancel(fs, **kwargs):
fs = [asyncio.ensure_future(f) for f in fs]
try:
d, p = yield from asyncio.wait(fs, **kwargs)
d, p = await asyncio.wait(fs, **kwargs)
except:
for f in fs:
f.cancel()
raise
for f in p:
f.cancel()
yield from asyncio.wait([f])
await asyncio.wait([f])
return fs
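A usage sketch for asyncio_wait_or_cancel (fast() and slow() are hypothetical coroutines; the point is the contract: completed futures keep their results, pending ones are cancelled and awaited before returning):

async def demo():
    fs = await asyncio_wait_or_cancel(
        [fast(), slow()],  # hypothetical coroutines
        timeout=1.0, return_when=asyncio.FIRST_COMPLETED)
    # every future in fs is now done: either with a result or cancelled
    results = [f.result() for f in fs if not f.cancelled()]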
class TaskObject:
def start(self):
self.task = asyncio.async(self._do())
self.task = asyncio.ensure_future(self._do())
@asyncio.coroutine
def stop(self):
async def stop(self):
self.task.cancel()
try:
yield from asyncio.wait_for(self.task, None)
await asyncio.wait_for(self.task, None)
except asyncio.CancelledError:
pass
del self.task
@asyncio.coroutine
def _do(self):
async def _do(self):
raise NotImplementedError
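A minimal TaskObject subclass sketch (hypothetical example, showing the contract: override _do() with the long-running coroutine, then drive it with start()/stop()):

class Heartbeat(TaskObject):  # hypothetical
    async def _do(self):
        while True:
            logger.debug("still alive")
            await asyncio.sleep(10)

# hb = Heartbeat(); hb.start()          # schedules _do() on the event loop
# loop.run_until_complete(hb.stop())    # cancels the task and waits for it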
@ -152,13 +162,12 @@ class Condition:
self._loop = asyncio.get_event_loop()
self._waiters = collections.deque()
@asyncio.coroutine
def wait(self):
async def wait(self):
"""Wait until notified."""
fut = asyncio.Future(loop=self._loop)
self._waiters.append(fut)
try:
yield from fut
await fut
finally:
self._waiters.remove(fut)


@ -0,0 +1,62 @@
"""
This transform implements time management functions (delay_mu/now_mu/at_mu)
using an accumulator 'now' and simple replacement rules:
delay_mu(t) -> now += t
now_mu() -> now
at_mu(t) -> now = t
The function delay(), that uses seconds, must be lowered to delay_mu() before
invoking this transform.
The accumulator is initialized to an int64 value at the beginning of the
output function.
"""
import ast
class _TimeLowerer(ast.NodeTransformer):
def visit_Call(self, node):
if node.func.id == "now_mu":
return ast.copy_location(ast.Name("now", ast.Load()), node)
else:
self.generic_visit(node)
return node
def visit_Expr(self, node):
r = node
if isinstance(node.value, ast.Call):
funcname = node.value.func.id
if funcname == "delay_mu":
r = ast.copy_location(
ast.AugAssign(target=ast.Name("now", ast.Store()),
op=ast.Add(),
value=node.value.args[0]),
node)
elif funcname == "at_mu":
r = ast.copy_location(
ast.Assign(targets=[ast.Name("now", ast.Store())],
value=node.value.args[0]),
node)
self.generic_visit(r)
return r
def lower_time(func_def):
_TimeLowerer().visit(func_def)
call_init = ast.Call(
func=ast.Name("syscall", ast.Load()),
args=[ast.Str("now_init")], keywords=[])
stmt_init = ast.Assign(targets=[ast.Name("now", ast.Store())],
value=call_init)
call_save = ast.Call(
func=ast.Name("syscall", ast.Load()),
args=[ast.Str("now_save"), ast.Name("now", ast.Load())], keywords=[])
stmt_save = ast.Expr(call_save)
func_def.body = [
stmt_init,
ast.Try(body=func_def.body,
handlers=[],
orelse=[],
finalbody=[stmt_save])
]
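To illustrate the replacement rules, a kernel body such as:

at_mu(t0)
delay_mu(100)
return now_mu()

comes out of lower_time() roughly as follows (the now accumulator is initialized and saved via the syscalls built above):

now = syscall("now_init")
try:
    now = t0
    now += 100
    return now
finally:
    syscall("now_save", now)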


@ -0,0 +1,109 @@
"""
This transform turns calls to delay() that use non-integer time
expressed in seconds into calls to delay_mu() that use int64 time
expressed in multiples of ref_period.
It does so by inserting multiplication/division/rounding operations around
those calls.
The seconds_to_mu and mu_to_seconds core language functions are also
implemented here, as well as watchdog to syscall conversion.
"""
import ast
from artiq.transforms.tools import value_to_ast
def _seconds_to_mu(ref_period, node):
divided = ast.copy_location(
ast.BinOp(left=node,
op=ast.Div(),
right=value_to_ast(ref_period)),
node)
return ast.copy_location(
ast.Call(func=ast.Name("round64", ast.Load()),
args=[divided], keywords=[]),
divided)
def _mu_to_seconds(ref_period, node):
return ast.copy_location(
ast.BinOp(left=node,
op=ast.Mult(),
right=value_to_ast(ref_period)),
node)
class _TimeQuantizer(ast.NodeTransformer):
def __init__(self, ref_period):
self.ref_period = ref_period
self.watchdog_id_counter = 0
def visit_Call(self, node):
funcname = node.func.id
if funcname == "delay":
node.func.id = "delay_mu"
if (isinstance(node.args[0], ast.Call)
and node.args[0].func.id == "mu_to_seconds"):
# optimize:
# delay(mu_to_seconds(x)) -> delay_mu(x)
node.args[0] = self.visit(node.args[0].args[0])
else:
node.args[0] = _seconds_to_mu(self.ref_period,
self.visit(node.args[0]))
return node
elif funcname == "seconds_to_mu":
return _seconds_to_mu(self.ref_period,
self.visit(node.args[0]))
elif funcname == "mu_to_seconds":
return _mu_to_seconds(self.ref_period,
self.visit(node.args[0]))
else:
self.generic_visit(node)
return node
def visit_With(self, node):
self.generic_visit(node)
if (isinstance(node.items[0].context_expr, ast.Call)
and node.items[0].context_expr.func.id == "watchdog"):
idname = "__watchdog_id_" + str(self.watchdog_id_counter)
self.watchdog_id_counter += 1
time = ast.BinOp(left=node.items[0].context_expr.args[0],
op=ast.Mult(),
right=ast.Num(1000))
time_int = ast.Call(
func=ast.Name("round", ast.Load()),
args=[time], keywords=[])
syscall_set = ast.Call(
func=ast.Name("syscall", ast.Load()),
args=[ast.Str("watchdog_set"), time_int], keywords=[])
stmt_set = ast.copy_location(
ast.Assign(targets=[ast.Name(idname, ast.Store())],
value=syscall_set),
node)
syscall_clear = ast.Call(
func=ast.Name("syscall", ast.Load()),
args=[ast.Str("watchdog_clear"),
ast.Name(idname, ast.Load())], keywords=[])
stmt_clear = ast.copy_location(ast.Expr(syscall_clear), node)
node.items[0] = ast.withitem(
context_expr=ast.Name(id="sequential",
ctx=ast.Load()),
optional_vars=None)
node.body = [
stmt_set,
ast.Try(body=node.body,
handlers=[],
orelse=[],
finalbody=[stmt_clear])
]
return node
def quantize_time(func_def, ref_period):
_TimeQuantizer(ref_period).visit(func_def)
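For example, with ref_period = 1e-9, quantize_time() rewrites delay(1.5e-6) into the integer-timestep form

delay_mu(round64(1.5e-06 / 1e-09))

and turns ``with watchdog(1.0):`` into a ``with sequential:`` block whose body is wrapped in a try/finally bracketed by the watchdog_set syscall (with the timeout converted to integer milliseconds) and the matching watchdog_clear syscall.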

artiq/transforms/tools.py (new file, 139 lines)

@ -0,0 +1,139 @@
import ast
from fractions import Fraction
from artiq.language import core as core_language
from artiq.language import units
embeddable_funcs = (
core_language.delay_mu, core_language.at_mu, core_language.now_mu,
core_language.delay,
core_language.seconds_to_mu, core_language.mu_to_seconds,
core_language.syscall, core_language.watchdog,
range, bool, int, float, round, len,
core_language.int64, core_language.round64,
Fraction, core_language.EncodedException
)
embeddable_func_names = {func.__name__ for func in embeddable_funcs}
def is_embeddable(func):
for ef in embeddable_funcs:
if func is ef:
return True
return False
def eval_ast(expr, symdict=dict()):
if not isinstance(expr, ast.Expression):
expr = ast.copy_location(ast.Expression(expr), expr)
ast.fix_missing_locations(expr)
code = compile(expr, "<ast>", "eval")
return eval(code, symdict)
class NotASTRepresentable(Exception):
pass
def value_to_ast(value):
if isinstance(value, core_language.int64): # must be before int
return ast.Call(
func=ast.Name("int64", ast.Load()),
args=[ast.Num(int(value))], keywords=[])
elif isinstance(value, bool) or value is None:
# must also be before int
# isinstance(True/False, int) == True
return ast.NameConstant(value)
elif isinstance(value, (int, float)):
return ast.Num(value)
elif isinstance(value, Fraction):
return ast.Call(
func=ast.Name("Fraction", ast.Load()),
args=[ast.Num(value.numerator), ast.Num(value.denominator)], keywords=[])
elif isinstance(value, str):
return ast.Str(value)
elif isinstance(value, list):
elts = [value_to_ast(elt) for elt in value]
return ast.List(elts, ast.Load())
else:
for kg in core_language.kernel_globals:
if value is getattr(core_language, kg):
return ast.Name(kg, ast.Load())
raise NotASTRepresentable(str(value))
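As a sanity check, value_to_ast() composes with eval_ast() when the symbol dictionary supplies the referenced constructors (an editorial sketch, not part of the diff):

node = value_to_ast(Fraction(3, 7))
assert eval_ast(node, {"Fraction": Fraction}) == Fraction(3, 7)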
class NotConstant(Exception):
pass
def eval_constant(node):
if isinstance(node, ast.Num):
return node.n
elif isinstance(node, ast.Str):
return node.s
elif isinstance(node, ast.NameConstant):
return node.value
elif isinstance(node, ast.Call):
funcname = node.func.id
if funcname == "int64":
return core_language.int64(eval_constant(node.args[0]))
elif funcname == "Fraction":
numerator = eval_constant(node.args[0])
denominator = eval_constant(node.args[1])
return Fraction(numerator, denominator)
else:
raise NotConstant
else:
raise NotConstant
_replaceable_funcs = {
"bool", "int", "float", "round",
"int64", "round64", "Fraction",
"seconds_to_mu", "mu_to_seconds"
}
def _is_ref_transparent(dependencies, expr):
if isinstance(expr, (ast.NameConstant, ast.Num, ast.Str)):
return True
elif isinstance(expr, ast.Name):
dependencies.add(expr.id)
return True
elif isinstance(expr, ast.UnaryOp):
return _is_ref_transparent(dependencies, expr.operand)
elif isinstance(expr, ast.BinOp):
return (_is_ref_transparent(dependencies, expr.left)
and _is_ref_transparent(dependencies, expr.right))
elif isinstance(expr, ast.BoolOp):
return all(_is_ref_transparent(dependencies, v) for v in expr.values)
elif isinstance(expr, ast.Call):
return (expr.func.id in _replaceable_funcs and
all(_is_ref_transparent(dependencies, arg)
for arg in expr.args))
else:
return False
def is_ref_transparent(expr):
dependencies = set()
if _is_ref_transparent(dependencies, expr):
return True, dependencies
else:
return False, None
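For instance (editorial examples): an arithmetic expression over names is referentially transparent and reports its dependencies, while a call to a non-replaceable function is not:

assert is_ref_transparent(ast.parse("a*2 + b", mode="eval").body) == (True, {"a", "b"})
assert is_ref_transparent(ast.parse("f(a)", mode="eval").body) == (False, None)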
class _NodeCounter(ast.NodeVisitor):
def __init__(self):
self.count = 0
def generic_visit(self, node):
self.count += 1
ast.NodeVisitor.generic_visit(self, node)
def count_all_nodes(node):
nc = _NodeCounter()
nc.visit(node)
return nc.count

artiq/transforms/unparse.py (new file, 575 lines)

@ -0,0 +1,575 @@
import sys
import ast
# Large float and imaginary literals get turned into infinities in the AST.
# We unparse those infinities to INFSTR.
INFSTR = "1e" + repr(sys.float_info.max_10_exp + 1)
def _interleave(inter, f, seq):
"""Call f on each item in seq, calling inter() in between.
"""
seq = iter(seq)
try:
f(next(seq))
except StopIteration:
pass
else:
for x in seq:
inter()
f(x)
class _Unparser:
"""Methods in this class recursively traverse an AST and
output source code for the abstract syntax; original formatting
is disregarded. """
def __init__(self, tree):
"""Print the source for tree to the "result" string."""
self.result = ""
self._indent = 0
self.dispatch(tree)
self.result += "\n"
def fill(self, text=""):
"Indent a piece of text, according to the current indentation level"
self.result += "\n"+" "*self._indent + text
def write(self, text):
"Append a piece of text to the current line."
self.result += text
def enter(self):
"Print ':', and increase the indentation."
self.write(":")
self._indent += 1
def leave(self):
"Decrease the indentation level."
self._indent -= 1
def dispatch(self, tree):
"Dispatcher function, dispatching tree type T to method _T."
if isinstance(tree, list):
for t in tree:
self.dispatch(t)
return
meth = getattr(self, "_"+tree.__class__.__name__)
meth(tree)
# Unparsing methods
#
# There should be one method per concrete grammar type
# Constructors should be grouped by sum type. Ideally,
# this would follow the order in the grammar, but
# currently doesn't.
def _Module(self, tree):
for stmt in tree.body:
self.dispatch(stmt)
# stmt
def _Expr(self, tree):
self.fill()
self.dispatch(tree.value)
def _Import(self, t):
self.fill("import ")
_interleave(lambda: self.write(", "), self.dispatch, t.names)
def _ImportFrom(self, t):
self.fill("from ")
self.write("." * t.level)
if t.module:
self.write(t.module)
self.write(" import ")
_interleave(lambda: self.write(", "), self.dispatch, t.names)
def _Assign(self, t):
self.fill()
for target in t.targets:
self.dispatch(target)
self.write(" = ")
self.dispatch(t.value)
def _AugAssign(self, t):
self.fill()
self.dispatch(t.target)
self.write(" "+self.binop[t.op.__class__.__name__]+"= ")
self.dispatch(t.value)
def _Return(self, t):
self.fill("return")
if t.value:
self.write(" ")
self.dispatch(t.value)
def _Pass(self, t):
self.fill("pass")
def _Break(self, t):
self.fill("break")
def _Continue(self, t):
self.fill("continue")
def _Delete(self, t):
self.fill("del ")
_interleave(lambda: self.write(", "), self.dispatch, t.targets)
def _Assert(self, t):
self.fill("assert ")
self.dispatch(t.test)
if t.msg:
self.write(", ")
self.dispatch(t.msg)
def _Global(self, t):
self.fill("global ")
_interleave(lambda: self.write(", "), self.write, t.names)
def _Nonlocal(self, t):
self.fill("nonlocal ")
_interleave(lambda: self.write(", "), self.write, t.names)
def _Yield(self, t):
self.write("(")
self.write("yield")
if t.value:
self.write(" ")
self.dispatch(t.value)
self.write(")")
def _YieldFrom(self, t):
self.write("(")
self.write("yield from")
if t.value:
self.write(" ")
self.dispatch(t.value)
self.write(")")
def _Raise(self, t):
self.fill("raise")
if not t.exc:
assert not t.cause
return
self.write(" ")
self.dispatch(t.exc)
if t.cause:
self.write(" from ")
self.dispatch(t.cause)
def _Try(self, t):
self.fill("try")
self.enter()
self.dispatch(t.body)
self.leave()
for ex in t.handlers:
self.dispatch(ex)
if t.orelse:
self.fill("else")
self.enter()
self.dispatch(t.orelse)
self.leave()
if t.finalbody:
self.fill("finally")
self.enter()
self.dispatch(t.finalbody)
self.leave()
def _ExceptHandler(self, t):
self.fill("except")
if t.type:
self.write(" ")
self.dispatch(t.type)
if t.name:
self.write(" as ")
self.write(t.name)
self.enter()
self.dispatch(t.body)
self.leave()
def _ClassDef(self, t):
self.write("\n")
for deco in t.decorator_list:
self.fill("@")
self.dispatch(deco)
self.fill("class "+t.name)
self.write("(")
comma = False
for e in t.bases:
if comma:
self.write(", ")
else:
comma = True
self.dispatch(e)
for e in t.keywords:
if comma:
self.write(", ")
else:
comma = True
self.dispatch(e)
self.write(")")
self.enter()
self.dispatch(t.body)
self.leave()
def _FunctionDef(self, t):
self.write("\n")
for deco in t.decorator_list:
self.fill("@")
self.dispatch(deco)
self.fill("def "+t.name + "(")
self.dispatch(t.args)
self.write(")")
if t.returns:
self.write(" -> ")
self.dispatch(t.returns)
self.enter()
self.dispatch(t.body)
self.leave()
def _For(self, t):
self.fill("for ")
self.dispatch(t.target)
self.write(" in ")
self.dispatch(t.iter)
self.enter()
self.dispatch(t.body)
self.leave()
if t.orelse:
self.fill("else")
self.enter()
self.dispatch(t.orelse)
self.leave()
def _If(self, t):
self.fill("if ")
self.dispatch(t.test)
self.enter()
self.dispatch(t.body)
self.leave()
# collapse nested ifs into equivalent elifs.
while (t.orelse and len(t.orelse) == 1 and
isinstance(t.orelse[0], ast.If)):
t = t.orelse[0]
self.fill("elif ")
self.dispatch(t.test)
self.enter()
self.dispatch(t.body)
self.leave()
# final else
if t.orelse:
self.fill("else")
self.enter()
self.dispatch(t.orelse)
self.leave()
def _While(self, t):
self.fill("while ")
self.dispatch(t.test)
self.enter()
self.dispatch(t.body)
self.leave()
if t.orelse:
self.fill("else")
self.enter()
self.dispatch(t.orelse)
self.leave()
def _With(self, t):
self.fill("with ")
_interleave(lambda: self.write(", "), self.dispatch, t.items)
self.enter()
self.dispatch(t.body)
self.leave()
# expr
def _Bytes(self, t):
self.write(repr(t.s))
def _Str(self, tree):
self.write(repr(tree.s))
def _Name(self, t):
self.write(t.id)
def _NameConstant(self, t):
self.write(repr(t.value))
def _Num(self, t):
# Substitute overflowing decimal literal for AST infinities.
self.write(repr(t.n).replace("inf", INFSTR))
def _List(self, t):
self.write("[")
_interleave(lambda: self.write(", "), self.dispatch, t.elts)
self.write("]")
def _ListComp(self, t):
self.write("[")
self.dispatch(t.elt)
for gen in t.generators:
self.dispatch(gen)
self.write("]")
def _GeneratorExp(self, t):
self.write("(")
self.dispatch(t.elt)
for gen in t.generators:
self.dispatch(gen)
self.write(")")
def _SetComp(self, t):
self.write("{")
self.dispatch(t.elt)
for gen in t.generators:
self.dispatch(gen)
self.write("}")
def _DictComp(self, t):
self.write("{")
self.dispatch(t.key)
self.write(": ")
self.dispatch(t.value)
for gen in t.generators:
self.dispatch(gen)
self.write("}")
def _comprehension(self, t):
self.write(" for ")
self.dispatch(t.target)
self.write(" in ")
self.dispatch(t.iter)
for if_clause in t.ifs:
self.write(" if ")
self.dispatch(if_clause)
def _IfExp(self, t):
self.write("(")
self.dispatch(t.body)
self.write(" if ")
self.dispatch(t.test)
self.write(" else ")
self.dispatch(t.orelse)
self.write(")")
def _Set(self, t):
assert(t.elts) # should be at least one element
self.write("{")
_interleave(lambda: self.write(", "), self.dispatch, t.elts)
self.write("}")
def _Dict(self, t):
self.write("{")
def write_pair(pair):
(k, v) = pair
self.dispatch(k)
self.write(": ")
self.dispatch(v)
_interleave(lambda: self.write(", "), write_pair,
zip(t.keys, t.values))
self.write("}")
def _Tuple(self, t):
self.write("(")
if len(t.elts) == 1:
(elt,) = t.elts
self.dispatch(elt)
self.write(",")
else:
_interleave(lambda: self.write(", "), self.dispatch, t.elts)
self.write(")")
unop = {"Invert": "~", "Not": "not", "UAdd": "+", "USub": "-"}
def _UnaryOp(self, t):
self.write("(")
self.write(self.unop[t.op.__class__.__name__])
self.write(" ")
self.dispatch(t.operand)
self.write(")")
binop = {"Add": "+", "Sub": "-", "Mult": "*", "Div": "/", "Mod": "%",
"LShift": "<<", "RShift": ">>",
"BitOr": "|", "BitXor": "^", "BitAnd": "&",
"FloorDiv": "//", "Pow": "**"}
def _BinOp(self, t):
self.write("(")
self.dispatch(t.left)
self.write(" " + self.binop[t.op.__class__.__name__] + " ")
self.dispatch(t.right)
self.write(")")
cmpops = {"Eq": "==", "NotEq": "!=",
"Lt": "<", "LtE": "<=", "Gt": ">", "GtE": ">=",
"Is": "is", "IsNot": "is not", "In": "in", "NotIn": "not in"}
def _Compare(self, t):
self.write("(")
self.dispatch(t.left)
for o, e in zip(t.ops, t.comparators):
self.write(" " + self.cmpops[o.__class__.__name__] + " ")
self.dispatch(e)
self.write(")")
boolops = {ast.And: "and", ast.Or: "or"}
def _BoolOp(self, t):
self.write("(")
s = " %s " % self.boolops[t.op.__class__]
_interleave(lambda: self.write(s), self.dispatch, t.values)
self.write(")")
def _Attribute(self, t):
self.dispatch(t.value)
# Special case: 3.__abs__() is a syntax error, so if t.value
# is an integer literal then we need to either parenthesize
# it or add an extra space to get 3 .__abs__().
if isinstance(t.value, ast.Num) and isinstance(t.value.n, int):
self.write(" ")
self.write(".")
self.write(t.attr)
def _Call(self, t):
self.dispatch(t.func)
self.write("(")
comma = False
for e in t.args:
if comma:
self.write(", ")
else:
comma = True
self.dispatch(e)
for e in t.keywords:
if comma:
self.write(", ")
else:
comma = True
self.dispatch(e)
self.write(")")
def _Subscript(self, t):
self.dispatch(t.value)
self.write("[")
self.dispatch(t.slice)
self.write("]")
def _Starred(self, t):
self.write("*")
self.dispatch(t.value)
# slice
def _Ellipsis(self, t):
self.write("...")
def _Index(self, t):
self.dispatch(t.value)
def _Slice(self, t):
if t.lower:
self.dispatch(t.lower)
self.write(":")
if t.upper:
self.dispatch(t.upper)
if t.step:
self.write(":")
self.dispatch(t.step)
def _ExtSlice(self, t):
_interleave(lambda: self.write(', '), self.dispatch, t.dims)
# argument
def _arg(self, t):
self.write(t.arg)
if t.annotation:
self.write(": ")
self.dispatch(t.annotation)
# others
def _arguments(self, t):
first = True
# normal arguments
defaults = [None] * (len(t.args) - len(t.defaults)) + t.defaults
for a, d in zip(t.args, defaults):
if first:
first = False
else:
self.write(", ")
self.dispatch(a)
if d:
self.write("=")
self.dispatch(d)
# varargs, or bare '*' if no varargs but keyword-only arguments present
if t.vararg or t.kwonlyargs:
if first:
first = False
else:
self.write(", ")
self.write("*")
if t.vararg:
self.write(t.vararg.arg)
if t.vararg.annotation:
self.write(": ")
self.dispatch(t.vararg.annotation)
# keyword-only arguments
if t.kwonlyargs:
for a, d in zip(t.kwonlyargs, t.kw_defaults):
if first:
first = False
else:
self.write(", ")
self.dispatch(a)
if d:
self.write("=")
self.dispatch(d)
# kwargs
if t.kwarg:
if first:
first = False
else:
self.write(", ")
self.write("**"+t.kwarg.arg)
if t.kwarg.annotation:
self.write(": ")
self.dispatch(t.kwarg.annotation)
def _keyword(self, t):
if t.arg is None:
self.write("**")
else:
self.write(t.arg)
self.write("=")
self.dispatch(t.value)
def _Lambda(self, t):
self.write("(")
self.write("lambda ")
self.dispatch(t.args)
self.write(": ")
self.dispatch(t.body)
self.write(")")
def _alias(self, t):
self.write(t.name)
if t.asname:
self.write(" as "+t.asname)
def _withitem(self, t):
self.dispatch(t.context_expr)
if t.optional_vars:
self.write(" as ")
self.dispatch(t.optional_vars)
def unparse(tree):
unparser = _Unparser(tree)
return unparser.result
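A round-trip sketch (editorial): the unparser normalizes formatting, e.g. it fully parenthesizes expressions:

tree = ast.parse("x = (1 + 2) * y")
print(unparse(tree))  # prints (modulo surrounding blank lines): x = ((1 + 2) * y)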

conda/README.md (new file, 18 lines)

@ -0,0 +1,18 @@
Uploading conda packages (Python 3.5)
=====================================
Preparing:
1. [Install miniconda][miniconda]
2. `conda update -q conda`
3. `conda install conda-build`
4. `conda create -q -n py35 python=3.5`
5. `conda config --add channels https://conda.anaconda.org/m-labs/channel/dev`
Building:
1. `source activate py35`
2. `conda build pkgname --python 3.5`; this command displays a path to the freshly built package
3. `anaconda upload <package> -c main -c dev`
[miniconda]: http://conda.pydata.org/docs/install/quick.html#linux-miniconda-install


@ -11,7 +11,7 @@ $PYTHON setup.py install --single-version-externally-managed --record=record.txt
git clone --recursive https://github.com/m-labs/misoc
export MSCDIR=$SRC_DIR/misoc
ARTIQ_PREFIX=$PREFIX/lib/python3.4/site-packages/artiq
ARTIQ_PREFIX=$PREFIX/lib/python3.5/site-packages/artiq
BIN_PREFIX=$ARTIQ_PREFIX/binaries/
mkdir -p $ARTIQ_PREFIX/misc
mkdir -p $BIN_PREFIX/kc705 $BIN_PREFIX/pipistrello
@ -20,14 +20,14 @@ mkdir -p $BIN_PREFIX/kc705 $BIN_PREFIX/pipistrello
cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_kc705 build-headers build-bios; cd -
make -C soc/runtime clean runtime.fbi
cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_kc705 $MISOC_EXTRA_VIVADO_CMDLINE build-bitstream; cd -
[ "$BUILD_SOC" != "0" ] && (cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_kc705 $MISOC_EXTRA_VIVADO_CMDLINE build-bitstream)
# install KC705 NIST_QC1 binaries
mkdir -p $BIN_PREFIX/kc705/nist_qc1
cp soc/runtime/runtime.fbi $BIN_PREFIX/kc705/nist_qc1/
cp $SRC_DIR/misoc/software/bios/bios.bin $BIN_PREFIX/kc705/
cp $SRC_DIR/misoc/build/artiq_kc705-nist_qc1-kc705.bit $BIN_PREFIX/kc705/
[ "$BUILD_SOC" != "0" ] && cp $SRC_DIR/misoc/build/artiq_kc705-nist_qc1-kc705.bit $BIN_PREFIX/kc705/
wget http://sionneau.net/artiq/binaries/kc705/flash_proxy/bscan_spi_kc705.bit
mv bscan_spi_kc705.bit $BIN_PREFIX/kc705/
@ -35,13 +35,13 @@ mv bscan_spi_kc705.bit $BIN_PREFIX/kc705/
cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_pipistrello build-headers build-bios; cd -
make -C soc/runtime clean runtime.fbi
cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_pipistrello $MISOC_EXTRA_ISE_CMDLINE build-bitstream; cd -
[ "$BUILD_SOC" != "0" ] && (cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_pipistrello $MISOC_EXTRA_ISE_CMDLINE build-bitstream)
# install Pipistrello binaries
cp soc/runtime/runtime.fbi $BIN_PREFIX/pipistrello/
cp $SRC_DIR/misoc/software/bios/bios.bin $BIN_PREFIX/pipistrello/
cp $SRC_DIR/misoc/build/artiq_pipistrello-nist_qc1-pipistrello.bit $BIN_PREFIX/pipistrello/
[ "$BUILD_SOC" != "0" ] && cp $SRC_DIR/misoc/build/artiq_pipistrello-nist_qc1-pipistrello.bit $BIN_PREFIX/pipistrello/
wget https://people.phys.ethz.ch/~robertjo/bscan_spi_lx45_csg324.bit
mv bscan_spi_lx45_csg324.bit $BIN_PREFIX/pipistrello/
@ -49,13 +49,13 @@ mv bscan_spi_lx45_csg324.bit $BIN_PREFIX/pipistrello/
cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_kc705 -s NIST_QC2 build-headers; cd -
make -C soc/runtime clean runtime.fbi
cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_kc705 -s NIST_QC2 $MISOC_EXTRA_VIVADO_CMDLINE build-bitstream; cd -
[ "$BUILD_SOC" != "0" ] && (cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_kc705 -s NIST_QC2 $MISOC_EXTRA_VIVADO_CMDLINE build-bitstream)
# install KC705 NIST_QC2 binaries
mkdir -p $BIN_PREFIX/kc705/nist_qc2
cp soc/runtime/runtime.fbi $BIN_PREFIX/kc705/nist_qc2/
cp $SRC_DIR/misoc/build/artiq_kc705-nist_qc2-kc705.bit $BIN_PREFIX/kc705/
[ "$BUILD_SOC" != "0" ] && cp $SRC_DIR/misoc/build/artiq_kc705-nist_qc2-kc705.bit $BIN_PREFIX/kc705/
cp artiq/frontend/artiq_flash.sh $PREFIX/bin


@ -28,14 +28,14 @@ build:
requirements:
build:
- python >=3.4.3
- python >=3.5.0
- setuptools
- numpy
- migen
- migen 0.0
- pyelftools
- binutils-or1k-linux
run:
- python >=3.4.3
- python >=3.5.0
- llvmlite-artiq
- scipy
- numpy

conda/chardet/bld.bat (new file, 2 lines)

@ -0,0 +1,2 @@
"%PYTHON%" setup.py install
if errorlevel 1 exit 1

conda/chardet/build.sh (new file, 1 line)

@ -0,0 +1 @@
$PYTHON setup.py install

conda/chardet/meta.yaml (new file, 33 lines)

@ -0,0 +1,33 @@
package:
name: chardet
version: 2.2.1
source:
fn: chardet-2.2.1.tar.gz
url: https://pypi.python.org/packages/source/c/chardet/chardet-2.2.1.tar.gz
md5: 4a758402eaefd0331bdedc7ecb6f452c
build:
entry_points:
- chardetect = chardet.chardetect:main
number: 0
requirements:
build:
- python
- setuptools
run:
- python
test:
# Python imports
imports:
- chardet
commands:
- chardetect run_test.py
about:
home: https://github.com/chardet/chardet
license: GNU Library or Lesser General Public License (LGPL)

conda/dateutil/bld.bat (new file, 1 line)

@ -0,0 +1 @@
%PYTHON% setup.py install

conda/dateutil/build.sh (new file, 1 line)

@ -0,0 +1 @@
$PYTHON setup.py install

conda/dateutil/meta.yaml (new file, 30 lines)

@ -0,0 +1,30 @@
package:
name: dateutil
version: 2.4.2
source:
fn: python-dateutil-2.4.2.tar.gz
url: https://pypi.python.org/packages/source/p/python-dateutil/python-dateutil-2.4.2.tar.gz
md5: 4ef68e1c485b09e9f034e10473e5add2
build:
number: 0
requirements:
build:
- python
- setuptools
- six >=1.5
run:
- python
- six >=1.5
test:
imports:
- dateutil
- dateutil.zoneinfo
about:
home: https://dateutil.readthedocs.org
license: BSD
summary: 'Extensions to the standard Python datetime module'

conda/pyelftools/bld.bat (new file, 1 line)

@ -0,0 +1 @@
%PYTHON% setup.py install


@ -0,0 +1 @@
$PYTHON setup.py install


@ -0,0 +1,26 @@
package:
name: pyelftools
version: 0.23
source:
git_url: https://github.com/eliben/pyelftools.git
git_tag: v0.23
build:
number: 0
requirements:
build:
- python
- setuptools
run:
- python
test:
imports:
- elftools
about:
home: https://github.com/eliben/pyelftools.git
license: Public domain
summary: 'Library for analyzing ELF files and DWARF debugging information'


@ -1,10 +1,10 @@
package:
name: pyqtgraph
version: 0.9.10~a6d5e28
version: 0.9.10.1036edf
source:
git_url: https://github.com/pyqtgraph/pyqtgraph.git
git_rev: a6d5e28
git_rev: 1036edf
requirements:
build:


@ -0,0 +1 @@
%PYTHON% setup.py install


@ -0,0 +1 @@
$PYTHON setup.py install


@ -0,0 +1,28 @@
package:
name: sphinx-argparse
version: 0.1.13
source:
fn: sphinx-argparse-0.1.13.tar.gz
url: https://pypi.python.org/packages/source/s/sphinx-argparse/sphinx-argparse-0.1.13.tar.gz
md5: 5ec84e75e1c4b2ae7ca5fb92a6abd738
build:
number: 0
requirements:
build:
- python
- setuptools
- sphinx
run:
- python
- sphinx
test:
imports:
- sphinxarg
about:
license: MIT
summary: 'Sphinx extension that automatically documents argparse commands and options'


@ -1,7 +1,13 @@
Core drivers reference
======================
These drivers are for peripherals closely integrated into the core device, which do not use the controller mechanism.
These drivers are for the core device and the peripherals closely integrated into it, which do not use the controller mechanism.
:mod:`artiq.coredevice.core` module
-----------------------------------
.. automodule:: artiq.coredevice.core
:members:
:mod:`artiq.coredevice.ttl` module
-----------------------------------


@ -13,14 +13,14 @@ As a very first step, we will turn on a LED on the core device. Create a file ``
class LED(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_device("led")
self.setattr_device("core")
self.setattr_device("led")
@kernel
def run(self):
self.led.on()
The central part of our code is our ``LED`` class, which derives from :class:`artiq.language.environment.EnvExperiment`. Among other features, ``EnvExperiment`` calls our ``build`` method and provides the ``attr_device`` method that interfaces to the device database to create the appropriate device drivers and make those drivers accessible as ``self.core`` and ``self.led``. The ``@kernel`` decorator tells the system that the ``run`` method must be executed on the core device (instead of the host). The decorator uses ``self.core`` internally, which is why we request the core device using ``attr_device`` like any other.
The central part of our code is our ``LED`` class, which derives from :class:`artiq.language.environment.EnvExperiment`. Among other features, ``EnvExperiment`` calls our ``build`` method and provides the ``setattr_device`` method that interfaces to the device database to create the appropriate device drivers and make those drivers accessible as ``self.core`` and ``self.led``. The ``@kernel`` decorator tells the system that the ``run`` method must be executed on the core device (instead of the host). The decorator uses ``self.core`` internally, which is why we request the core device using ``setattr_device`` like any other.
Copy the files ``ddb.pyon`` and ``pdb.pyon`` (containing the device and parameter databases) from the ``examples/master`` folder of ARTIQ into the same directory as ``led.py`` (alternatively, you can use the ``-d`` and ``-p`` options of ``artiq_run``). You can open the database files using a text editor - their contents are in a human-readable format. You will probably want to set the IP address of the core device in ``ddb.pyon`` so that the computer can connect to it (it is the ``host`` parameter of the ``comm`` entry). See :ref:`ddb` for more information. The example device database is designed for the NIST QC1 hardware on the KC705; see :ref:`board-ports` for RTIO channel assignments if you need to adapt the device database to a different hardware platform.
@ -48,8 +48,8 @@ Modify the code as follows: ::
class LED(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_device("led")
self.setattr_device("core")
self.setattr_device("led")
@kernel
def run(self):
@ -98,8 +98,8 @@ Create a new file ``rtio.py`` containing the following: ::
class Tutorial(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_device("ttl0")
self.setattr_device("core")
self.setattr_device("ttl0")
@kernel
def run(self):
@ -122,8 +122,8 @@ Try reducing the period of the generated waveform until the CPU cannot keep up w
class Tutorial(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_device("ttl0")
self.setattr_device("core")
self.setattr_device("ttl0")
@kernel
def run(self):


@ -53,7 +53,7 @@ Experiments may have arguments whose values can be set in the GUI and used in th
def build(self):
self.attr_argument("count", NumberValue(ndecimals=0))
self.setattr_argument("count", NumberValue(ndecimals=0))
def run(self):
for i in range(int(self.count)):


@ -13,9 +13,9 @@ Installing using conda
Installing Anaconda or Miniconda
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
* You can either install Anaconda (chose Python 3.4) from https://store.continuum.io/cshop/anaconda/
* You can either install Anaconda (choose Python 3.5) from https://store.continuum.io/cshop/anaconda/
* Or install the more minimalistic Miniconda (chose Python 3.4) from http://conda.pydata.org/miniconda.html
* Or install the more minimalistic Miniconda (choose Python 3.5) from http://conda.pydata.org/miniconda.html
.. warning::
If you are installing on Windows, choose the Windows 32-bit version regardless of whether you have
@ -148,7 +148,7 @@ These steps are required to generate bitstream (``.bit``) files, build the MiSoC
$ python3 setup.py develop --user
.. note::
The options ``develop`` and ``--user`` are for setup.py to install Migen in ``~/.local/lib/python3.4``.
The options ``develop`` and ``--user`` are for setup.py to install Migen in ``~/.local/lib/python3.5``.
.. _install-xc3sprog:
@ -369,7 +369,7 @@ This command installs all the required packages: ::
$ sudo apt-get install build-essential autotools-dev file git patch perl xutils-dev python3-pip texinfo flex bison libmpc-dev python3-serial python3-dateutil python3-prettytable python3-setuptools python3-numpy python3-scipy python3-sphinx python3-h5py python3-dev python-dev subversion cmake libusb-dev libftdi-dev pkg-config libffi-dev libgit2-dev
Note that ARTIQ requires Python 3.4.3 or above.
Note that ARTIQ requires Python 3.5.0 or above.
To set user permissions on the JTAG and serial ports of the Pipistrello, create a ``/etc/udev/rules.d/30-usb-papilio.rules`` file containing the following: ::


@ -22,6 +22,8 @@ A controller manager connects to the master and uses the device database to dete
Controller managers use the local network address of the connection to the master to filter the device database and run only those controllers that are allocated to the current node. Hostname resolution is supported.
.. warning:: With some network setups, the current machine's hostname without the domain name resolves to a localhost address (127.0.0.1 or even 127.0.1.1). If you wish to use controllers across a network, make sure that the hostname you provide resolves to an IP address visible on the network (e.g. try providing the full hostname including the domain name).
Command-line client
-------------------


@ -0,0 +1 @@
{"flopping_freq": 1499.9876804260716}


@ -1 +0,0 @@
{"flopping_freq": 1500.0164816344934}


@ -3,10 +3,11 @@ from artiq import *
class SubComponent1(HasEnvironment):
def build(self):
self.attr_argument("sc1_scan", Scannable(default=NoScan(325)),
"Flux capacitor")
self.attr_argument("sc1_enum", EnumerationValue(["1", "2", "3"]),
"Flux capacitor")
self.setattr_argument("sc1_scan", Scannable(default=NoScan(3250),
scale=1e3, unit="Hz"),
"Flux capacitor")
self.setattr_argument("sc1_enum", EnumerationValue(["1", "2", "3"]),
"Flux capacitor")
def do(self):
print("SC1:")
@ -17,12 +18,12 @@ class SubComponent1(HasEnvironment):
class SubComponent2(HasEnvironment):
def build(self):
self.attr_argument("sc2_boolean", BooleanValue(False),
"Transporter")
self.attr_argument("sc2_scan", Scannable(default=NoScan(325)),
"Transporter")
self.attr_argument("sc2_enum", EnumerationValue(["3", "4", "5"]),
"Transporter")
self.setattr_argument("sc2_boolean", BooleanValue(False),
"Transporter")
self.setattr_argument("sc2_scan", Scannable(default=NoScan(325)),
"Transporter")
self.setattr_argument("sc2_enum", EnumerationValue(["3", "4", "5"]),
"Transporter")
def do(self):
print("SC2:")
@ -34,15 +35,16 @@ class SubComponent2(HasEnvironment):
class ArgumentsDemo(EnvExperiment):
def build(self):
self.attr_argument("free_value", FreeValue(None))
self.attr_argument("number", NumberValue(42, unit="s", step=0.1,
ndecimals=4))
self.attr_argument("string", StringValue("Hello World"))
self.attr_argument("scan", Scannable(global_max=400,
default=NoScan(325),
ndecimals=6))
self.attr_argument("boolean", BooleanValue(True), "Group")
self.attr_argument("enum", EnumerationValue(
self.setattr_argument("free_value", FreeValue(None))
self.setattr_argument("number", NumberValue(42e-6,
unit="s", scale=1e-6,
ndecimals=4))
self.setattr_argument("string", StringValue("Hello World"))
self.setattr_argument("scan", Scannable(global_max=400,
default=NoScan(325),
ndecimals=6))
self.setattr_argument("boolean", BooleanValue(True), "Group")
self.setattr_argument("enum", EnumerationValue(
["foo", "bar", "quux"], "foo"), "Group")
self.sc1 = SubComponent1(parent=self)


@ -0,0 +1,25 @@
from operator import itemgetter
from artiq import *
class DDSSetter(EnvExperiment):
"""DDS Setter"""
def build(self):
self.dds = dict()
device_db = self.get_device_db()
for k, v in sorted(device_db.items(), key=itemgetter(0)):
if (isinstance(v, dict)
and v["type"] == "local"
and v["module"] == "artiq.coredevice.dds"
and v["class"] in {"AD9858", "AD9914"}):
self.dds[k] = {
"driver": self.get_device(k),
"frequency": self.get_argument("{}_frequency".format(k),
NumberValue())
}
def run(self):
for k, v in self.dds.items():
v["driver"].set(v["frequency"])


@ -5,15 +5,15 @@ class DDSTest(EnvExperiment):
"""DDS test"""
def build(self):
self.attr_device("core")
self.attr_device("dds_bus")
self.attr_device("dds0")
self.attr_device("dds1")
self.attr_device("dds2")
self.attr_device("ttl0")
self.attr_device("ttl1")
self.attr_device("ttl2")
self.attr_device("led")
self.setattr_device("core")
self.setattr_device("dds_bus")
self.setattr_device("dds0")
self.setattr_device("dds1")
self.setattr_device("dds2")
self.setattr_device("ttl0")
self.setattr_device("ttl1")
self.setattr_device("ttl2")
self.setattr_device("led")
@kernel
def run(self):


@ -27,21 +27,21 @@ class FloppingF(EnvExperiment):
"""Flopping F simulation"""
def build(self):
self.attr_argument("frequency_scan", Scannable(
self.setattr_argument("frequency_scan", Scannable(
default=LinearScan(1000, 2000, 100)))
self.attr_argument("F0", NumberValue(1500, min=1000, max=2000))
self.attr_argument("noise_amplitude", NumberValue(0.1, min=0, max=100,
self.setattr_argument("F0", NumberValue(1500, min=1000, max=2000))
self.setattr_argument("noise_amplitude", NumberValue(0.1, min=0, max=100,
step=0.01))
self.attr_device("scheduler")
self.setattr_device("scheduler")
def run(self):
frequency = self.set_result("flopping_f_frequency", [],
realtime=True, store=False)
brightness = self.set_result("flopping_f_brightness", [],
realtime=True)
self.set_result("flopping_f_fit", [], realtime=True, store=False)
frequency = self.set_dataset("flopping_f_frequency", [],
broadcast=True, save=False)
brightness = self.set_dataset("flopping_f_brightness", [],
broadcast=True)
self.set_dataset("flopping_f_fit", [], broadcast=True, save=False)
for f in self.frequency_scan:
m_brightness = model(f, self.F0) + self.noise_amplitude*random.random()
@ -52,16 +52,16 @@ class FloppingF(EnvExperiment):
self.scheduler.priority, time.time() + 20, False)
def analyze(self):
# Use get_result so that analyze can be run stand-alone.
frequency = self.get_result("flopping_f_frequency")
brightness = self.get_result("flopping_f_brightness")
# Use get_dataset so that analyze can be run stand-alone.
frequency = self.get_dataset("flopping_f_frequency")
brightness = self.get_dataset("flopping_f_brightness")
popt, pcov = curve_fit(model_numpy,
frequency, brightness,
p0=[self.get_parameter("flopping_freq")])
p0=[self.get_dataset("flopping_freq")])
perr = np.sqrt(np.diag(pcov))
if perr < 0.1:
F0 = float(popt)
self.set_parameter("flopping_freq", F0)
self.set_result("flopping_f_fit",
[model(x, F0) for x in frequency],
realtime=True, store=False)
self.set_dataset("flopping_freq", F0, persist=True, save=False)
self.set_dataset("flopping_f_fit",
[model(x, F0) for x in frequency],
broadcast=True, save=False)


@ -3,8 +3,8 @@ from artiq import *
class Handover(EnvExperiment):
def build(self):
self.attr_device("core")
self.attr_device("led")
self.setattr_device("core")
self.setattr_device("led")
@kernel
def blink_once(self):


@ -7,7 +7,7 @@ class Mandelbrot(EnvExperiment):
"""Mandelbrot set demo"""
def build(self):
self.attr_device("core")
self.setattr_device("core")
def col(self, i):
sys.stdout.write(" .,-:;i+hHM$*#@ "[i])


@ -5,20 +5,20 @@ class PhotonHistogram(EnvExperiment):
"""Photon histogram"""
def build(self):
self.attr_device("core")
self.attr_device("dds_bus")
self.attr_device("bd_dds")
self.attr_device("bd_sw")
self.attr_device("bdd_dds")
self.attr_device("bdd_sw")
self.attr_device("pmt")
self.setattr_device("core")
self.setattr_device("dds_bus")
self.setattr_device("bd_dds")
self.setattr_device("bd_sw")
self.setattr_device("bdd_dds")
self.setattr_device("bdd_sw")
self.setattr_device("pmt")
self.attr_argument("nbins", FreeValue(100))
self.attr_argument("repeats", FreeValue(100))
self.setattr_argument("nbins", FreeValue(100))
self.setattr_argument("repeats", FreeValue(100))
self.attr_parameter("cool_f", 230*MHz)
self.attr_parameter("detect_f", 220*MHz)
self.attr_parameter("detect_t", 100*us)
self.setattr_dataset("cool_f", 230*MHz)
self.setattr_dataset("detect_f", 220*MHz)
self.setattr_dataset("detect_t", 100*us)
@kernel
def program_cooling(self):
@ -60,8 +60,9 @@ class PhotonHistogram(EnvExperiment):
hist[n] += 1
total += n
self.set_result("cooling_photon_histogram", hist)
self.set_parameter("ion_present", total > 5*self.repeats)
self.set_dataset("cooling_photon_histogram", hist)
self.set_dataset("ion_present", total > 5*self.repeats,
broadcast=True)
if __name__ == "__main__":


@ -0,0 +1,18 @@
from itertools import count
from time import sleep
from artiq import *
class RunForever(EnvExperiment):
def build(self):
self.setattr_device("scheduler")
def run(self):
try:
for i in count():
self.scheduler.pause()
sleep(1)
print("ping", i)
except TerminationRequested:
print("Terminated gracefully")


@ -13,7 +13,7 @@ class _PayloadNOP(EnvExperiment):
class _PayloadCoreNOP(EnvExperiment):
def build(self):
self.attr_device("core")
self.setattr_device("core")
@kernel
def run(self):
@ -22,7 +22,7 @@ class _PayloadCoreNOP(EnvExperiment):
class _PayloadCoreSend100Ints(EnvExperiment):
def build(self):
self.attr_device("core")
self.setattr_device("core")
def devnull(self, d):
pass
@ -35,7 +35,7 @@ class _PayloadCoreSend100Ints(EnvExperiment):
class _PayloadCoreSend1MB(EnvExperiment):
def build(self):
self.attr_device("core")
self.setattr_device("core")
def devnull(self, d):
pass
@ -48,7 +48,7 @@ class _PayloadCoreSend1MB(EnvExperiment):
class _PayloadCorePrimes(EnvExperiment):
def build(self):
self.attr_device("core")
self.setattr_device("core")
def devnull(self, d):
pass
@ -70,17 +70,17 @@ class _PayloadCorePrimes(EnvExperiment):
class SpeedBenchmark(EnvExperiment):
"""Speed benchmark"""
def build(self):
self.attr_argument("mode", EnumerationValue(["Single experiment",
"With pause",
"With scheduler"]))
self.attr_argument("payload", EnumerationValue(["NOP",
"CoreNOP",
"CoreSend100Ints",
"CoreSend1MB",
"CorePrimes"]))
self.attr_argument("nruns", NumberValue(10, min=1, max=1000))
self.attr_device("core")
self.attr_device("scheduler")
self.setattr_argument("mode", EnumerationValue(["Single experiment",
"With pause",
"With scheduler"]))
self.setattr_argument("payload", EnumerationValue(["NOP",
"CoreNOP",
"CoreSend100Ints",
"CoreSend1MB",
"CorePrimes"]))
self.setattr_argument("nruns", NumberValue(10, min=1, max=1000, ndecimals=0))
self.setattr_device("core")
self.setattr_device("scheduler")
def run_with_scheduler(self):
nruns = int(self.nruns)
@ -111,9 +111,9 @@ class SpeedBenchmark(EnvExperiment):
self.scheduler.pause()
end_time = time.monotonic()
self.set_result("benchmark_run_time",
(end_time-start_time)/self.nruns,
realtime=True)
self.set_dataset("benchmark_run_time",
(end_time-start_time)/self.nruns,
broadcast=True)
def run(self):
if self.mode == "Single experiment":
@ -128,11 +128,11 @@ class SpeedBenchmark(EnvExperiment):
class _Report(EnvExperiment):
def build(self):
self.attr_argument("start_time")
self.attr_argument("nruns")
self.setattr_argument("start_time")
self.setattr_argument("nruns")
def run(self):
end_time = time.monotonic()
self.set_result("benchmark_run_time",
(end_time-self.start_time)/self.nruns,
realtime=True)
self.set_dataset("benchmark_run_time",
(end_time-self.start_time)/self.nruns,
broadcast=True)


@ -31,9 +31,9 @@ class TDR(EnvExperiment):
This is also equivalent to a loopback tester or a delay measurement.
"""
def build(self):
self.attr_device("core")
self.attr_device("pmt0")
self.attr_device("ttl2")
self.setattr_device("core")
self.setattr_device("pmt0")
self.setattr_device("ttl2")
def run(self):
n = 1000 # repetitions


@ -16,16 +16,16 @@ class Transport(EnvExperiment):
"""Transport"""
def build(self):
self.attr_device("core")
self.attr_device("bd")
self.attr_device("bdd")
self.attr_device("pmt")
self.attr_device("electrodes")
self.setattr_device("core")
self.setattr_device("bd")
self.setattr_device("bdd")
self.setattr_device("pmt")
self.setattr_device("electrodes")
self.attr_argument("wait_at_stop", FreeValue(100*us))
self.attr_argument("speed", FreeValue(1.5))
self.attr_argument("repeats", FreeValue(100))
self.attr_argument("nbins", FreeValue(100))
self.setattr_argument("wait_at_stop", FreeValue(100*us))
self.setattr_argument("speed", FreeValue(1.5))
self.setattr_argument("repeats", FreeValue(100))
self.setattr_argument("nbins", FreeValue(100))
def calc_waveforms(self, stop):
t = transport_data["t"][:stop]*self.speed


@ -5,16 +5,16 @@ class AluminumSpectroscopy(EnvExperiment):
"""Aluminum spectroscopy (simulation)"""
def build(self):
self.attr_device("core")
self.attr_device("mains_sync")
self.attr_device("laser_cooling")
self.attr_device("spectroscopy")
self.attr_device("spectroscopy_b")
self.attr_device("state_detection")
self.attr_device("pmt")
self.attr_parameter("spectroscopy_freq", 432*MHz)
self.attr_argument("photon_limit_low", FreeValue(10))
self.attr_argument("photon_limit_high", FreeValue(15))
self.setattr_device("core")
self.setattr_device("mains_sync")
self.setattr_device("laser_cooling")
self.setattr_device("spectroscopy")
self.setattr_device("spectroscopy_b")
self.setattr_device("state_detection")
self.setattr_device("pmt")
self.setattr_dataset("spectroscopy_freq", 432*MHz)
self.setattr_argument("photon_limit_low", FreeValue(10))
self.setattr_argument("photon_limit_high", FreeValue(15))
@kernel
def run(self):


@ -5,9 +5,9 @@ class SimpleSimulation(EnvExperiment):
"""Simple simulation"""
def build(self):
self.attr_device("core")
self.setattr_device("core")
for wo in "abcd":
self.attr_device(wo)
self.setattr_device(wo)
@kernel
def run(self):


@ -5,8 +5,8 @@ import sys
import os
if sys.version_info[:3] < (3, 4, 3):
raise Exception("You need at least Python 3.4.3 to run ARTIQ")
if sys.version_info[:3] < (3, 5, 0):
raise Exception("You need Python 3.5.0+")
class PushDocCommand(Command):