M-Labs/artiq — commit bd5b324fc2
Merge branch 'master' into new-py2llvm
@@ -12,6 +12,7 @@ doc/manual/_build
 /dist
 /*.egg-info
 /.coverage
+artiq/test/results
 examples/master/results
 Output/
 /lit-test/libartiq_support/libartiq_support.so

.travis.yml (20 lines changed)
@@ -1,33 +1,39 @@
 language: python
 python:
-  - '3.4'
+  - '3.5'
 branches:
   only:
     - master
 sudo: false
 env:
   global:
-    - BUILD_SOC=1
    - secure: "DUk/Ihg8KbbzEgPF0qrHqlxU8e8eET9i/BtzNvFddIGX4HP/P2qz0nk3cVkmjuWhqJXSbC22RdKME9qqPzw6fJwJ6dpJ3OR6dDmSd7rewavq+niwxu52PVa+yK8mL4yf1terM7QQ5tIRf+yUL9qGKrZ2xyvEuRit6d4cFep43Ws="
+  matrix:
+    - BUILD_SOC=0
+    - BUILD_SOC=1
 before_install:
   - mkdir -p $HOME/.mlabs
   - if [ $TRAVIS_PULL_REQUEST != false ]; then BUILD_SOC=0; fi
   - if [ $BUILD_SOC -ne 0 ]; then ./.travis/get-xilinx.sh; fi
   - . ./.travis/get-toolchain.sh
   - . ./.travis/get-anaconda.sh
-  - source $HOME/miniconda/bin/activate py34
+  - echo "BUILD_SOC=$BUILD_SOC" >> $HOME/.mlabs/build_settings.sh
+  - source $HOME/miniconda/bin/activate py35
   - conda install -q pip coverage anaconda-client migen cython
   - pip install coveralls
 install:
-  - conda build conda/artiq
+  - conda build --python 3.5 conda/artiq
   - conda install -q artiq --use-local
 script:
   - coverage run --source=artiq setup.py test
   - make -C doc/manual html
 after_success:
-  - anaconda -q login --hostname $(hostname) --username $binstar_login --password $binstar_password
-  - if [ "$TRAVIS_BRANCH" == "master" ]; then anaconda -q upload --user $binstar_login --channel dev --force $HOME/miniconda/conda-bld/linux-64/artiq-*.tar.bz2; fi
-  - anaconda -q logout
+  - |
+    if [ "$TRAVIS_BRANCH" == "master" -a $BUILD_SOC -eq 1 ]; then
+      anaconda -q login --hostname $(hostname) --username $binstar_login --password $binstar_password
+      anaconda -q upload --user $binstar_login --channel dev --force $HOME/miniconda/conda-bld/linux-64/artiq-*.tar.bz2
+      anaconda -q logout
+    fi
  - coveralls
 notifications:
   email:

@@ -9,5 +9,5 @@ conda config --set always_yes yes --set changeps1 no
 conda update -q conda
 conda info -a
 conda install conda-build jinja2
-conda create -q -n py34 python=$TRAVIS_PYTHON_VERSION
+conda create -q -n py35 python=$TRAVIS_PYTHON_VERSION
 conda config --add channels https://conda.anaconda.org/m-labs/channel/dev

@@ -19,11 +19,10 @@ do
 done
 
 export PATH=$PWD/packages/usr/local/llvm-or1k/bin:$PWD/packages/usr/local/bin:$PWD/packages/usr/bin:$PATH
-export LD_LIBRARY_PATH=$PWD/packages/usr/lib/x86_64-linux-gnu:$PWD/packages/usr/local/x86_64-unknown-linux-gnu/or1k-elf/lib:$LD_LIBRARY_PATH
+export LD_LIBRARY_PATH=$PWD/packages/usr/local/llvm-or1k/lib:$PWD/packages/usr/lib/x86_64-linux-gnu:$PWD/packages/usr/local/x86_64-unknown-linux-gnu/or1k-elf/lib:$LD_LIBRARY_PATH
 
-echo "export LD_LIBRARY_PATH=$PWD/packages/usr/lib/x86_64-linux-gnu:$PWD/packages/usr/local/x86_64-unknown-linux-gnu/or1k-elf/lib:\$LD_LIBRARY_PATH" >> $HOME/.mlabs/build_settings.sh
 echo "export PATH=$PWD/packages/usr/local/llvm-or1k/bin:$PWD/packages/usr/local/bin:$PWD/packages/usr/bin:\$PATH" >> $HOME/.mlabs/build_settings.sh
+echo "export LD_LIBRARY_PATH=$PWD/packages/usr/local/llvm-or1k/lib:$PWD/packages/usr/lib/x86_64-linux-gnu:$PWD/packages/usr/local/x86_64-unknown-linux-gnu/or1k-elf/lib:\$LD_LIBRARY_PATH" >> $HOME/.mlabs/build_settings.sh
 
-or1k-linux-as --version
 llc --version
 clang --version

@@ -30,7 +30,7 @@ git clone https://github.com/fallen/impersonate_macaddress
 make -C impersonate_macaddress
 # Tell mibuild where Xilinx toolchains are installed
 # and feed it the mac address corresponding to the license
-cat > $HOME/.mlabs/build_settings.sh << EOF
+cat >> $HOME/.mlabs/build_settings.sh << EOF
 MISOC_EXTRA_VIVADO_CMDLINE="-Ob vivado_path $HOME/Xilinx/Vivado"
 MISOC_EXTRA_ISE_CMDLINE="-Ob ise_path $HOME/opt/Xilinx/"
 export MACADDR=$macaddress

@@ -1,6 +1,6 @@
 import sys, os
 
-from artiq.protocols.file_db import FlatFileDB
+from artiq.master.databases import DeviceDB
 from artiq.master.worker_db import DeviceManager
 
 from artiq.coredevice.core import Core, CompileError
@@ -17,10 +17,10 @@ def main():
     testcase_vars = {'__name__': 'testbench'}
     exec(testcase_code, testcase_vars)
 
-    ddb_path = os.path.join(os.path.dirname(sys.argv[1]), "ddb.pyon")
+    ddb_path = os.path.join(os.path.dirname(sys.argv[1]), "device_db.pyon")
 
     try:
-        core = Core(dmgr=DeviceManager(FlatFileDB(ddb_path)))
+        core = Core(dmgr=DeviceManager(DeviceDB(ddb_path)))
         if compile_only:
             core.compile(testcase_vars["entrypoint"], (), {})
         else:

@@ -34,10 +34,22 @@ def rtio_get_counter() -> TInt64:
     raise NotImplementedError("syscall not simulated")
 
 
 class Core:
-    def __init__(self, dmgr, ref_period=8*ns, external_clock=False):
-        self.comm = dmgr.get("comm")
+    """Core device driver.
+
+    :param ref_period: period of the reference clock for the RTIO subsystem.
+        On platforms that use clock multiplication and SERDES-based PHYs,
+        this is the period after multiplication. For example, with a RTIO core
+        clocked at 125MHz and a SERDES multiplication factor of 8, the
+        reference period is 1ns.
+        The time machine unit is equal to this period.
+    :param external_clock: whether the core device should switch to its
+        external RTIO clock input instead of using its internal oscillator.
+    :param comm_device: name of the device used for communications.
+    """
+    def __init__(self, dmgr, ref_period=8*ns, external_clock=False, comm_device="comm"):
         self.ref_period = ref_period
         self.external_clock = external_clock
+        self.comm = dmgr.get(comm_device)
 
         self.first_run = True
         self.core = self
@@ -80,6 +92,8 @@ class Core:
 
     @kernel
     def break_realtime(self):
+        """Set the timeline to the current value of the hardware RTIO counter
+        plus a margin of 125000 machine units."""
         min_now = rtio_get_counter() + 125000
         if now_mu() < min_now:
             at_mu(min_now)

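The constructor change above replaces the hard-coded "comm" lookup with a comm_device argument, so the communication device is now resolved by name through the device manager. A hypothetical device_db.pyon fragment wiring a Core to a differently named comm entry might look like this (the entry layout and comm module path are assumptions for illustration, not part of the diff):

    {
        "comm_alt": {
            "type": "local",
            "module": "artiq.coredevice.comm_tcp",   # module path assumed
            "class": "Comm",
            "arguments": {"host": "192.168.1.50"}
        },
        "core": {
            "type": "local",
            "module": "artiq.coredevice.core",
            "class": "Core",
            # ref_period of 1 ns matches the docstring's 125 MHz x8 SERDES example
            "arguments": {"ref_period": 1e-9, "comm_device": "comm_alt"}
        }
    }
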
@@ -137,16 +137,17 @@ class Novatech409B:
         else:
             self._ser_send("I a")
 
+    def do_simultaneous_update(self):
+        """Apply update in simultaneous update mode."""
+        self._ser_send("I p")
+
     def set_freq(self, ch_no, freq):
         """Set frequency of one channel."""
-        self.set_simultaneous_update(False)
         # Novatech expects MHz
         self._ser_send("F{:d} {:f}".format(ch_no, freq/1e6))
 
     def set_phase(self, ch_no, phase):
         """Set phase of one channel."""
-        # do this immediately, disable SimultaneousUpdate mode
-        self.set_simultaneous_update(False)
         # phase word is required by device
         # N is an integer from 0 to 16383. Phase is set to
         # N*360/16384 deg; in ARTIQ represent phase in cycles [0, 1]
@@ -154,33 +155,6 @@ class Novatech409B:
         cmd = "P{:d} {:d}".format(ch_no, phase_word)
         self._ser_send(cmd)
 
-    def set_freq_all_phase_continuous(self, freq):
-        """Set frequency of all channels simultaneously.
-
-        Set frequency of all channels simultaneously.
-        1) all DDSs are set to phase continuous mode
-        2) all DDSs are simultaneously set to new frequency
-        Together 1 and 2 ensure phase continuous frequency switching.
-        """
-        self.set_simultaneous_update(True)
-        self.set_phase_continuous(True)
-        for i in range(4):
-            self.set_freq(i, freq)
-        # send command necessary to update all channels at the same time
-        self._ser_send("I p")
-
-    def set_phase_all(self, phase):
-        """Set phase of all channels simultaneously."""
-
-        self.set_simultaneous_update(True)
-        # Note that this only works if the continuous
-        # phase switching is turned off.
-        self.set_phase_continuous(False)
-        for i in range(4):
-            self.set_phase(i, phase)
-        # send command necessary to update all channels at the same time
-        self._ser_send("I p")
-
     def set_gain(self, ch_no, volts):
         """Set amplitude of one channel."""
 
@@ -191,7 +165,6 @@ class Novatech409B:
             s = "Amplitude out of range {v}".format(v=volts)
             raise ValueError(s)
 
-        self.set_simultaneous_update(False)
         s = "V{:d} {:d}".format(ch_no, dac_value)
         self._ser_send(s)
 
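With the implicit set_simultaneous_update(False) calls removed from set_freq, set_phase and set_gain, the caller now controls the update mode explicitly, and the new do_simultaneous_update issues the "I p" command that applies queued updates. A minimal usage sketch under those assumptions (the constructor arguments are hypothetical):

    dds = Novatech409B("/dev/ttyUSB0")   # constructor signature assumed
    dds.set_simultaneous_update(True)    # queue updates instead of applying them
    for ch in range(4):
        dds.set_freq(ch, 10e6)           # queued; no longer forces immediate mode
    dds.do_simultaneous_update()         # "I p": apply all queued updates at once
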
@@ -12,6 +12,7 @@ from prettytable import PrettyTable
 from artiq.protocols.pc_rpc import Client
 from artiq.protocols.sync_struct import Subscriber
 from artiq.protocols import pyon
+from artiq.tools import short_format
 
 
 def clear_screen():
@@ -58,40 +59,37 @@ def get_argparser():
     parser_delete = subparsers.add_parser("delete",
                                           help="delete an experiment "
                                                "from the schedule")
+    parser_delete.add_argument("-g", action="store_true",
+                               help="request graceful termination")
     parser_delete.add_argument("rid", type=int,
                                help="run identifier (RID)")
 
-    parser_set_device = subparsers.add_parser(
-        "set-device", help="add or modify a device")
-    parser_set_device.add_argument("name", help="name of the device")
-    parser_set_device.add_argument("description",
-                                   help="description in PYON format")
+    parser_set_dataset = subparsers.add_parser(
+        "set-dataset", help="add or modify a dataset")
+    parser_set_dataset.add_argument("name", help="name of the dataset")
+    parser_set_dataset.add_argument("value",
+                                    help="value in PYON format")
+    parser_set_dataset.add_argument("-p", "--persist", action="store_true",
+                                    help="make the dataset persistent")
 
-    parser_del_device = subparsers.add_parser(
-        "del-device", help="delete a device")
-    parser_del_device.add_argument("name", help="name of the device")
-
-    parser_set_parameter = subparsers.add_parser(
-        "set-parameter", help="add or modify a parameter")
-    parser_set_parameter.add_argument("name", help="name of the parameter")
-    parser_set_parameter.add_argument("value",
-                                      help="value in PYON format")
-
-    parser_del_parameter = subparsers.add_parser(
-        "del-parameter", help="delete a parameter")
-    parser_del_parameter.add_argument("name", help="name of the parameter")
+    parser_del_dataset = subparsers.add_parser(
+        "del-dataset", help="delete a dataset")
+    parser_del_dataset.add_argument("name", help="name of the dataset")
 
     parser_show = subparsers.add_parser(
-        "show", help="show schedule, log, devices or parameters")
+        "show", help="show schedule, log, devices or datasets")
     parser_show.add_argument(
         "what",
-        help="select object to show: schedule/log/devices/parameters")
+        help="select object to show: schedule/log/devices/datasets")
 
-    parser_scan = subparsers.add_parser("scan-repository",
-                                        help="trigger a repository (re)scan")
-    parser_scan.add_argument("revision", default=None, nargs="?",
-                             help="use a specific repository revision "
-                                  "(defaults to head)")
+    subparsers.add_parser(
+        "scan-devices", help="trigger a device database (re)scan")
+
+    parser_scan_repos = subparsers.add_parser(
+        "scan-repository", help="trigger a repository (re)scan")
+    parser_scan_repos.add_argument("revision", default=None, nargs="?",
+                                   help="use a specific repository revision "
+                                        "(defaults to head)")
 
     return parser
@@ -128,23 +126,22 @@ def _action_submit(remote, args):
 
 
 def _action_delete(remote, args):
-    remote.delete(args.rid)
+    if args.g:
+        remote.request_termination(args.rid)
+    else:
+        remote.delete(args.rid)
 
 
-def _action_set_device(remote, args):
-    remote.set(args.name, pyon.decode(args.description))
+def _action_set_dataset(remote, args):
+    remote.set(args.name, pyon.decode(args.value), args.persist)
 
 
-def _action_del_device(remote, args):
+def _action_del_dataset(remote, args):
     remote.delete(args.name)
 
 
-def _action_set_parameter(remote, args):
-    remote.set(args.name, pyon.decode(args.value))
+def _action_scan_devices(remote, args):
+    remote.scan()
 
 
-def _action_del_parameter(remote, args):
-    remote.delete(args.name)
-
-
 def _action_scan_repository(remote, args):
@@ -192,11 +189,11 @@ def _show_devices(devices):
     print(table)
 
 
-def _show_parameters(parameters):
+def _show_datasets(datasets):
     clear_screen()
-    table = PrettyTable(["Parameter", "Value"])
-    for k, v in sorted(parameters.items(), key=itemgetter(0)):
-        table.add_row([k, str(v)])
+    table = PrettyTable(["Dataset", "Persistent", "Value"])
+    for k, (persist, value) in sorted(datasets.items(), key=itemgetter(0)):
+        table.add_row([k, "Y" if persist else "N", short_format(value)])
     print(table)
 
 
@@ -265,8 +262,8 @@ def main():
             _show_log(args)
         elif args.what == "devices":
             _show_dict(args, "devices", _show_devices)
-        elif args.what == "parameters":
-            _show_dict(args, "parameters", _show_parameters)
+        elif args.what == "datasets":
+            _show_dict(args, "datasets", _show_datasets)
         else:
             print("Unknown object to show, use -h to list valid names.")
             sys.exit(1)
@@ -275,10 +272,9 @@ def main():
         target_name = {
             "submit": "master_schedule",
             "delete": "master_schedule",
-            "set_device": "master_ddb",
-            "del_device": "master_ddb",
-            "set_parameter": "master_pdb",
-            "del_parameter": "master_pdb",
+            "set_dataset": "master_dataset_db",
+            "del_dataset": "master_dataset_db",
+            "scan_devices": "master_device_db",
             "scan_repository": "master_repository"
         }[action]
         remote = Client(args.server, port, target_name)

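_show_datasets implies the wire format of dataset notifications: each entry maps a name to a (persist, value) pair instead of a bare value. A small sketch of how the table rows are derived (the sample data is invented):

    from operator import itemgetter

    datasets = {"flopping_f": (True, 1499.9), "spectrum": (False, [0.1, 0.4, 0.2])}
    for k, (persist, value) in sorted(datasets.items(), key=itemgetter(0)):
        print(k, "Y" if persist else "N", value)
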
@@ -2,8 +2,8 @@
 
 import sys, logging, argparse
 
-from artiq.protocols.file_db import FlatFileDB
-from artiq.master.worker_db import DeviceManager
+from artiq.master.databases import DeviceDB, DatasetDB
+from artiq.master.worker_db import DeviceManager, DatasetManager
 from artiq.coredevice.core import CompileError
 from artiq.tools import *
 
@@ -15,10 +15,10 @@ def get_argparser():
     parser = argparse.ArgumentParser(description="ARTIQ static compiler")
 
     verbosity_args(parser)
-    parser.add_argument("-d", "--ddb", default="ddb.pyon",
-                        help="device database file")
-    parser.add_argument("-p", "--pdb", default="pdb.pyon",
-                        help="parameter database file")
+    parser.add_argument("--device-db", default="device_db.pyon",
+                        help="device database file (default: '%(default)s')")
+    parser.add_argument("--dataset-db", default="dataset_db.pyon",
+                        help="dataset file (default: '%(default)s')")
 
     parser.add_argument("-e", "--experiment", default=None,
                         help="experiment to compile")
@@ -36,14 +36,14 @@ def main():
     args = get_argparser().parse_args()
     init_logger(args)
 
-    dmgr = DeviceManager(FlatFileDB(args.ddb))
-    pdb = FlatFileDB(args.pdb)
+    device_mgr = DeviceManager(DeviceDB(args.device_db))
+    dataset_mgr = DatasetManager(DatasetDB(args.dataset_db))
 
     try:
         module = file_import(args.file)
         exp = get_experiment(module, args.experiment)
         arguments = parse_arguments(args.arguments)
-        exp_inst = exp(dmgr, pdb, **arguments)
+        exp_inst = exp(device_mgr, dataset_mgr, **arguments)
 
         if not hasattr(exp.run, "artiq_embedded"):
             raise ValueError("Experiment entry point must be a kernel")
@@ -57,7 +57,7 @@ def main():
         print(error.render_string(colored=True), file=sys.stderr)
         return
     finally:
-        dmgr.close_devices()
+        device_mgr.close_devices()
 
     if object_map.has_rpc():
         raise ValueError("Experiment must not use RPC")

@@ -2,8 +2,8 @@
 
 import argparse
 
+from artiq.master.databases import DeviceDB
 from artiq.master.worker_db import DeviceManager
-from artiq.protocols.file_db import FlatFileDB
 
 
 def to_bytes(string):
@@ -13,8 +13,8 @@ def to_bytes(string):
 def get_argparser():
     parser = argparse.ArgumentParser(description="ARTIQ core device "
                                                  "remote access tool")
-    parser.add_argument("-d", "--ddb", default="ddb.pyon",
-                        help="device database file")
+    parser.add_argument("--device-db", default="device_db.pyon",
+                        help="device database file (default: '%(default)s')")
 
     subparsers = parser.add_subparsers(dest="action")
     subparsers.required = True
@@ -58,9 +58,9 @@ def get_argparser():
 
 def main():
     args = get_argparser().parse_args()
-    dmgr = DeviceManager(FlatFileDB(args.ddb))
+    device_mgr = DeviceManager(DeviceDB(args.device_db))
     try:
-        comm = dmgr.get("comm")
+        comm = device_mgr.get("comm")
         comm.check_ident()
 
         if args.action == "log":
@@ -83,7 +83,7 @@ def main():
         elif args.action == "cfg-erase":
             comm.flash_storage_erase()
     finally:
-        dmgr.close_devices()
+        device_mgr.close_devices()
 
 if __name__ == "__main__":
     main()

@@ -11,7 +11,7 @@ import socket
 from artiq.protocols.sync_struct import Subscriber
 from artiq.protocols.pc_rpc import AsyncioClient, Server
 from artiq.tools import verbosity_args, init_logger
-from artiq.tools import TaskObject, asyncio_process_wait_timeout, Condition
+from artiq.tools import TaskObject, Condition
 
 
 logger = logging.getLogger(__name__)
@@ -56,60 +56,55 @@ class Controller:
         self.process = None
         self.launch_task = asyncio.Task(self.launcher())
 
-    @asyncio.coroutine
-    def end(self):
+    async def end(self):
         self.launch_task.cancel()
-        yield from asyncio.wait_for(self.launch_task, None)
+        await asyncio.wait_for(self.launch_task, None)
 
-    @asyncio.coroutine
-    def _call_controller(self, method):
+    async def _call_controller(self, method):
         remote = AsyncioClient()
-        yield from remote.connect_rpc(self.host, self.port, None)
+        await remote.connect_rpc(self.host, self.port, None)
         try:
             targets, _ = remote.get_rpc_id()
             remote.select_rpc_target(targets[0])
-            r = yield from getattr(remote, method)()
+            r = await getattr(remote, method)()
         finally:
             remote.close_rpc()
         return r
 
-    @asyncio.coroutine
-    def _ping(self):
+    async def _ping(self):
         try:
-            ok = yield from asyncio.wait_for(self._call_controller("ping"),
-                                             self.ping_timeout)
+            ok = await asyncio.wait_for(self._call_controller("ping"),
+                                        self.ping_timeout)
             if ok:
                 self.retry_timer_cur = self.retry_timer
             return ok
         except:
             return False
 
-    @asyncio.coroutine
-    def _wait_and_ping(self):
+    async def _wait_and_ping(self):
         while True:
             try:
-                yield from asyncio_process_wait_timeout(self.process,
-                                                        self.ping_timer)
+                await asyncio.wait_for(self.process.wait(),
+                                       self.ping_timer)
             except asyncio.TimeoutError:
                 logger.debug("pinging controller %s", self.name)
-                ok = yield from self._ping()
+                ok = await self._ping()
                 if not ok:
                     logger.warning("Controller %s ping failed", self.name)
-                    yield from self._terminate()
+                    await self._terminate()
                     return
             else:
                 break
 
-    @asyncio.coroutine
-    def launcher(self):
+    async def launcher(self):
         try:
             while True:
                 logger.info("Starting controller %s with command: %s",
                             self.name, self.command)
                 try:
-                    self.process = yield from asyncio.create_subprocess_exec(
+                    self.process = await asyncio.create_subprocess_exec(
                         *shlex.split(self.command))
-                    yield from self._wait_and_ping()
+                    await self._wait_and_ping()
                 except FileNotFoundError:
                     logger.warning("Controller %s failed to start", self.name)
                 else:
@@ -117,33 +112,32 @@ class Controller:
                 logger.warning("Restarting in %.1f seconds",
                                self.retry_timer_cur)
                 try:
-                    yield from asyncio.wait_for(self.retry_now.wait(),
-                                                self.retry_timer_cur)
+                    await asyncio.wait_for(self.retry_now.wait(),
+                                           self.retry_timer_cur)
                 except asyncio.TimeoutError:
                     pass
                 self.retry_timer_cur *= self.retry_timer_backoff
         except asyncio.CancelledError:
-            yield from self._terminate()
+            await self._terminate()
 
-    @asyncio.coroutine
-    def _terminate(self):
+    async def _terminate(self):
         logger.info("Terminating controller %s", self.name)
         if self.process is not None and self.process.returncode is None:
             try:
-                yield from asyncio.wait_for(self._call_controller("terminate"),
-                                            self.term_timeout)
+                await asyncio.wait_for(self._call_controller("terminate"),
+                                       self.term_timeout)
             except:
                 logger.warning("Controller %s did not respond to terminate "
                                "command, killing", self.name)
                 self.process.kill()
             try:
-                yield from asyncio_process_wait_timeout(self.process,
-                                                        self.term_timeout)
+                await asyncio.wait_for(self.process.wait(),
+                                       self.term_timeout)
            except:
                 logger.warning("Controller %s failed to exit, killing",
                                self.name)
                 self.process.kill()
-                yield from self.process.wait()
+                await self.process.wait()
         logger.debug("Controller %s terminated", self.name)
 
 
@@ -163,17 +157,16 @@ class Controllers:
         self.active = dict()
         self.process_task = asyncio.Task(self._process())
 
-    @asyncio.coroutine
-    def _process(self):
+    async def _process(self):
         while True:
-            action, param = yield from self.queue.get()
+            action, param = await self.queue.get()
             if action == "set":
                 k, ddb_entry = param
                 if k in self.active:
-                    yield from self.active[k].end()
+                    await self.active[k].end()
                 self.active[k] = Controller(k, ddb_entry)
             elif action == "del":
-                yield from self.active[param].end()
+                await self.active[param].end()
                 del self.active[param]
             else:
                 raise ValueError
@@ -196,11 +189,10 @@ class Controllers:
         for name in set(self.active_or_queued):
             del self[name]
 
-    @asyncio.coroutine
-    def shutdown(self):
+    async def shutdown(self):
         self.process_task.cancel()
         for c in self.active.values():
-            yield from c.end()
+            await c.end()
 
 
 class ControllerDB:
@@ -225,8 +217,7 @@ class ControllerManager(TaskObject):
         self.retry_master = retry_master
         self.controller_db = ControllerDB()
 
-    @asyncio.coroutine
-    def _do(self):
+    async def _do(self):
         try:
             subscriber = Subscriber("devices",
                                     self.controller_db.sync_struct_init)
@@ -236,12 +227,12 @@ class ControllerManager(TaskObject):
                     s = subscriber.writer.get_extra_info("socket")
                     localhost = s.getsockname()[0]
                     self.controller_db.set_host_filter(localhost)
-                yield from subscriber.connect(self.server, self.port,
-                                              set_host_filter)
+                await subscriber.connect(self.server, self.port,
+                                         set_host_filter)
                 try:
-                    yield from asyncio.wait_for(subscriber.receive_task, None)
+                    await asyncio.wait_for(subscriber.receive_task, None)
                 finally:
-                    yield from subscriber.close()
+                    await subscriber.close()
             except (ConnectionAbortedError, ConnectionError,
                     ConnectionRefusedError, ConnectionResetError) as e:
                 logger.warning("Connection to master failed (%s: %s)",
@@ -249,11 +240,11 @@ class ControllerManager(TaskObject):
             else:
                 logger.warning("Connection to master lost")
             logger.warning("Retrying in %.1f seconds", self.retry_master)
-            yield from asyncio.sleep(self.retry_master)
+            await asyncio.sleep(self.retry_master)
         except asyncio.CancelledError:
             pass
         finally:
-            yield from self.controller_db.current_controllers.shutdown()
+            await self.controller_db.current_controllers.shutdown()
 
     def retry_now(self, k):
         """If a controller is disabled and pending retry, perform that retry

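Almost all of the changes above are a mechanical migration from Python 3.4 generator-based coroutines to the native async/await syntax of Python 3.5, matching the py34 to py35 switch in .travis.yml; the helper asyncio_process_wait_timeout is likewise replaced by a plain asyncio.wait_for around process.wait(). The pattern, reduced to a standalone sketch:

    import asyncio

    @asyncio.coroutine          # old style (Python 3.4)
    def ping_old():
        yield from asyncio.sleep(0.1)
        return True

    async def ping_new():       # new style (Python 3.5)
        await asyncio.sleep(0.1)
        return True

    loop = asyncio.get_event_loop()
    assert loop.run_until_complete(ping_old())
    assert loop.run_until_complete(ping_new())
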
@@ -15,8 +15,7 @@ from artiq.protocols.pc_rpc import AsyncioClient
 from artiq.gui.state import StateManager
 from artiq.gui.explorer import ExplorerDock
 from artiq.gui.moninj import MonInj
-from artiq.gui.results import ResultsDock
-from artiq.gui.parameters import ParametersDock
+from artiq.gui.datasets import DatasetsDock
 from artiq.gui.schedule import ScheduleDock
 from artiq.gui.log import LogDock
 from artiq.gui.console import ConsoleDock
@@ -92,30 +91,24 @@ def main():
         args.server, args.port_notify))
     atexit.register(lambda: loop.run_until_complete(d_explorer.sub_close()))
 
-    d_results = ResultsDock(win, area)
-    smgr.register(d_results)
-    loop.run_until_complete(d_results.sub_connect(
+    d_datasets = DatasetsDock(win, area)
+    smgr.register(d_datasets)
+    loop.run_until_complete(d_datasets.sub_connect(
         args.server, args.port_notify))
-    atexit.register(lambda: loop.run_until_complete(d_results.sub_close()))
+    atexit.register(lambda: loop.run_until_complete(d_datasets.sub_close()))
 
     if os.name != "nt":
         d_ttl_dds = MonInj()
         loop.run_until_complete(d_ttl_dds.start(args.server, args.port_notify))
         atexit.register(lambda: loop.run_until_complete(d_ttl_dds.stop()))
 
-    d_params = ParametersDock()
-    loop.run_until_complete(d_params.sub_connect(
-        args.server, args.port_notify))
-    atexit.register(lambda: loop.run_until_complete(d_params.sub_close()))
-
     if os.name != "nt":
         area.addDock(d_ttl_dds.dds_dock, "top")
         area.addDock(d_ttl_dds.ttl_dock, "above", d_ttl_dds.dds_dock)
-        area.addDock(d_results, "above", d_ttl_dds.ttl_dock)
+        area.addDock(d_datasets, "above", d_ttl_dds.ttl_dock)
     else:
-        area.addDock(d_results, "top")
-    area.addDock(d_params, "above", d_results)
-    area.addDock(d_explorer, "above", d_params)
+        area.addDock(d_datasets, "top")
+    area.addDock(d_explorer, "above", d_datasets)
 
     d_schedule = ScheduleDock(status_bar, schedule_ctl)
     loop.run_until_complete(d_schedule.sub_connect(
@@ -127,16 +120,18 @@ def main():
         args.server, args.port_notify))
     atexit.register(lambda: loop.run_until_complete(d_log.sub_close()))
 
-    pdb = AsyncioClient()
-    loop.run_until_complete(pdb.connect_rpc(
-        args.server, args.port_control, "master_pdb"))
-    atexit.register(lambda: pdb.close_rpc())
-    def _get_parameter(k, v):
-        asyncio.async(pdb.set(k, v))
+    dataset_db = AsyncioClient()
+    loop.run_until_complete(dataset_db.connect_rpc(
+        args.server, args.port_control, "master_dataset_db"))
+    atexit.register(lambda: dataset_db.close_rpc())
+    def _set_dataset(k, v):
+        asyncio.ensure_future(dataset_db.set(k, v))
+    def _del_dataset(k):
+        asyncio.ensure_future(dataset_db.delete(k))
     d_console = ConsoleDock(
-        d_params.get_parameter,
-        _get_parameter,
-        d_results.get_result)
+        d_datasets.get_dataset,
+        _set_dataset,
+        _del_dataset)
 
     area.addDock(d_console, "bottom")
     area.addDock(d_log, "above", d_console)

@@ -93,27 +93,26 @@ class DBWriter(TaskObject):
         try:
             self._queue.put_nowait((k, v))
         except asyncio.QueueFull:
-            logger.warning("failed to update parameter '%s': "
+            logger.warning("failed to update dataset '%s': "
                            "too many pending updates", k)
 
-    @asyncio.coroutine
-    def _do(self):
+    async def _do(self):
         while True:
-            k, v = yield from self._queue.get()
+            k, v = await self._queue.get()
             url = self.base_url + "/write"
             params = {"u": self.user, "p": self.password, "db": self.database,
                       "consistency": "any", "precision": "n"}
             fmt_ty, fmt_v = format_influxdb(v)
-            data = "{},parameter={} {}={}".format(self.table, k, fmt_ty, fmt_v)
+            data = "{},dataset={} {}={}".format(self.table, k, fmt_ty, fmt_v)
             try:
-                response = yield from aiohttp.request(
+                response = await aiohttp.request(
                     "POST", url, params=params, data=data)
             except:
                 logger.warning("got exception trying to update '%s'",
                                k, exc_info=True)
             else:
                 if response.status not in (200, 204):
-                    content = (yield from response.content.read()).decode()
+                    content = (await response.content.read()).decode()
                     if content:
                         content = content[:-1]  # drop \n
                     logger.warning("got HTTP status %d "
@@ -122,15 +121,31 @@ class DBWriter(TaskObject):
                 response.close()
 
 
-class Parameters:
+class _Mock:
+    def __setitem__(self, k, v):
+        pass
+
+    def __getitem__(self, k):
+        return self
+
+    def __delitem__(self, k):
+        pass
+
+
+class Datasets:
     def __init__(self, filter_function, writer, init):
         self.filter_function = filter_function
         self.writer = writer
 
     def __setitem__(self, k, v):
         if self.filter_function(k):
-            self.writer.update(k, v)
+            self.writer.update(k, v[1])
 
+    # ignore mutations
+    def __getitem__(self, k):
+        return _Mock()
+
+    # ignore deletions
     def __delitem__(self, k):
         pass
 
@@ -144,18 +159,17 @@ class MasterReader(TaskObject):
         self.filter_function = filter_function
         self.writer = writer
 
-    @asyncio.coroutine
-    def _do(self):
+    async def _do(self):
         subscriber = Subscriber(
-            "parameters",
-            partial(Parameters, self.filter_function, self.writer))
+            "datasets",
+            partial(Datasets, self.filter_function, self.writer))
         while True:
             try:
-                yield from subscriber.connect(self.server, self.port)
+                await subscriber.connect(self.server, self.port)
                 try:
-                    yield from asyncio.wait_for(subscriber.receive_task, None)
+                    await asyncio.wait_for(subscriber.receive_task, None)
                 finally:
-                    yield from subscriber.close()
+                    await subscriber.close()
             except (ConnectionAbortedError, ConnectionError,
                     ConnectionRefusedError, ConnectionResetError) as e:
                 logger.warning("Connection to master failed (%s: %s)",
@@ -163,7 +177,7 @@ class MasterReader(TaskObject):
             else:
                 logger.warning("Connection to master lost")
             logger.warning("Retrying in %.1f seconds", self.retry)
-            yield from asyncio.sleep(self.retry)
+            await asyncio.sleep(self.retry)
 
 
 class Filter:

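The _Mock class exists because the sync_struct subscriber replays nested mutations (for example, appending to a list stored inside a dataset) through chained item accesses; returning an object that absorbs any access makes the InfluxDB bridge ignore in-place mutations and record only top-level assignments, where v[1] extracts the value from the (persist, value) pair. A self-contained sketch of the absorption trick:

    class _Mock:
        def __setitem__(self, k, v):
            pass

        def __getitem__(self, k):
            return self

        def __delitem__(self, k):
            pass

    m = _Mock()
    m["a"]["b"][0] = 42   # every level of this nested mutation is silently ignored
    del m["a"]["b"]
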
@@ -6,8 +6,8 @@ import atexit
 import os
 
 from artiq.protocols.pc_rpc import Server
-from artiq.protocols.sync_struct import Notifier, Publisher, process_mod
-from artiq.protocols.file_db import FlatFileDB
+from artiq.protocols.sync_struct import Notifier, Publisher
+from artiq.master.databases import DeviceDB, DatasetDB
 from artiq.master.scheduler import Scheduler
 from artiq.master.worker_db import get_last_rid
 from artiq.master.repository import FilesystemBackend, GitBackend, Repository
@@ -27,10 +27,10 @@ def get_argparser():
         "--port-control", default=3251, type=int,
         help="TCP port to listen to for control (default: %(default)d)")
     group = parser.add_argument_group("databases")
-    group.add_argument("-d", "--ddb", default="ddb.pyon",
-                       help="device database file")
-    group.add_argument("-p", "--pdb", default="pdb.pyon",
-                       help="parameter database file")
+    group.add_argument("--device-db", default="device_db.pyon",
+                       help="device database file (default: '%(default)s')")
+    group.add_argument("--dataset-db", default="dataset_db.pyon",
+                       help="dataset file (default: '%(default)s')")
     group = parser.add_argument_group("repository")
     group.add_argument(
         "-g", "--git", default=False, action="store_true",
@@ -64,24 +64,25 @@ def main():
     loop = asyncio.get_event_loop()
     atexit.register(lambda: loop.close())
 
-    ddb = FlatFileDB(args.ddb)
-    pdb = FlatFileDB(args.pdb)
-    rtr = Notifier(dict())
+    device_db = DeviceDB(args.device_db)
+    dataset_db = DatasetDB(args.dataset_db)
+    dataset_db.start()
+    atexit.register(lambda: loop.run_until_complete(dataset_db.stop()))
     log = Log(1000)
 
     if args.git:
         repo_backend = GitBackend(args.repository)
     else:
         repo_backend = FilesystemBackend(args.repository)
-    repository = Repository(repo_backend, log.log)
+    repository = Repository(repo_backend, device_db.get_device_db, log.log)
     atexit.register(repository.close)
     repository.scan_async()
 
     worker_handlers = {
-        "get_device": ddb.get,
-        "get_parameter": pdb.get,
-        "set_parameter": pdb.set,
-        "update_rt_results": lambda mod: process_mod(rtr, mod),
+        "get_device_db": device_db.get_device_db,
+        "get_device": device_db.get,
+        "get_dataset": dataset_db.get,
+        "update_dataset": dataset_db.update,
         "log": log.log
     }
     scheduler = Scheduler(get_last_rid() + 1, worker_handlers, repo_backend)
@@ -90,8 +91,8 @@ def main():
     atexit.register(lambda: loop.run_until_complete(scheduler.stop()))
 
     server_control = Server({
-        "master_ddb": ddb,
-        "master_pdb": pdb,
+        "master_device_db": device_db,
+        "master_dataset_db": dataset_db,
         "master_schedule": scheduler,
         "master_repository": repository
     })
@@ -101,9 +102,8 @@ def main():
 
     server_notify = Publisher({
         "schedule": scheduler.notifier,
-        "devices": ddb.data,
-        "parameters": pdb.data,
-        "rt_results": rtr,
+        "devices": device_db.data,
+        "datasets": dataset_db.data,
         "explist": repository.explist,
         "log": log.data
     })

@@ -4,16 +4,14 @@
 
 import argparse
 import sys
-import time
 from operator import itemgetter
-from itertools import chain
 import logging
 
 import h5py
 
 from artiq.language.environment import EnvExperiment
-from artiq.protocols.file_db import FlatFileDB
-from artiq.master.worker_db import DeviceManager, ResultDB
+from artiq.master.databases import DeviceDB, DatasetDB
+from artiq.master.worker_db import DeviceManager, DatasetManager
 from artiq.coredevice.core import CompileError
 from artiq.compiler.embedding import ObjectMap
 from artiq.compiler.targets import OR1KTarget
@@ -24,8 +22,8 @@ logger = logging.getLogger(__name__)
 
 class ELFRunner(EnvExperiment):
     def build(self):
-        self.attr_device("core")
-        self.attr_argument("file")
+        self.setattr_device("core")
+        self.setattr_argument("file")
 
     def run(self):
         with open(self.file, "rb") as f:
@@ -38,11 +36,6 @@ class ELFRunner(EnvExperiment):
             lambda addresses: target.symbolize(kernel_library, addresses))
 
 
-class SimpleParamLogger:
-    def set(self, timestamp, name, value):
-        logger.info("Parameter change: {} = {}".format(name, value))
-
-
 class DummyScheduler:
     def __init__(self):
         self.next_rid = 0
@@ -68,10 +61,10 @@ def get_argparser(with_file=True):
         description="Local experiment running tool")
 
     verbosity_args(parser)
-    parser.add_argument("-d", "--ddb", default="ddb.pyon",
-                        help="device database file")
-    parser.add_argument("-p", "--pdb", default="pdb.pyon",
-                        help="parameter database file")
+    parser.add_argument("--device-db", default="device_db.pyon",
+                        help="device database file (default: '%(default)s')")
+    parser.add_argument("--dataset-db", default="dataset_db.pyon",
+                        help="dataset file (default: '%(default)s')")
 
     parser.add_argument("-e", "--experiment", default=None,
                         help="experiment to run")
@@ -87,7 +80,7 @@ def get_argparser(with_file=True):
     return parser
 
 
-def _build_experiment(dmgr, pdb, rdb, args):
+def _build_experiment(device_mgr, dataset_mgr, args):
     if hasattr(args, "file"):
         if args.file.endswith(".elf"):
             if args.arguments:
@@ -95,7 +88,7 @@ def _build_experiment(dmgr, pdb, rdb, args):
             if args.experiment:
                 raise ValueError("experiment-by-name not supported "
                                  "for ELF kernels")
-            return ELFRunner(dmgr, pdb, rdb, file=args.file)
+            return ELFRunner(device_mgr, dataset_mgr, file=args.file)
         else:
             module = file_import(args.file, prefix="artiq_run_")
         file = args.file
@@ -109,22 +102,21 @@ def _build_experiment(dmgr, pdb, rdb, args):
         "experiment": args.experiment,
         "arguments": arguments
     }
-    dmgr.virtual_devices["scheduler"].expid = expid
-    return exp(dmgr, pdb, rdb, **arguments)
+    device_mgr.virtual_devices["scheduler"].expid = expid
+    return exp(device_mgr, dataset_mgr, **arguments)
 
 
 def run(with_file=False):
     args = get_argparser(with_file).parse_args()
     init_logger(args)
 
-    dmgr = DeviceManager(FlatFileDB(args.ddb),
-                         virtual_devices={"scheduler": DummyScheduler()})
-    pdb = FlatFileDB(args.pdb)
-    pdb.hooks.append(SimpleParamLogger())
-    rdb = ResultDB()
+    device_mgr = DeviceManager(DeviceDB(args.device_db),
+                               virtual_devices={"scheduler": DummyScheduler()})
+    dataset_db = DatasetDB(args.dataset_db)
+    dataset_mgr = DatasetManager(dataset_db)
 
     try:
-        exp_inst = _build_experiment(dmgr, pdb, rdb, args)
+        exp_inst = _build_experiment(device_mgr, dataset_mgr, args)
         exp_inst.prepare()
         exp_inst.run()
         exp_inst.analyze()
@@ -132,15 +124,15 @@ def run(with_file=False):
         print(error.render_string(colored=True), file=sys.stderr)
         return
     finally:
-        dmgr.close_devices()
+        device_mgr.close_devices()
 
     if args.hdf5 is not None:
         with h5py.File(args.hdf5, "w") as f:
-            rdb.write_hdf5(f)
-    elif rdb.rt.read or rdb.nrt:
-        r = chain(rdb.rt.read.items(), rdb.nrt.items())
-        for k, v in sorted(r, key=itemgetter(0)):
+            dataset_mgr.write_hdf5(f)
+    else:
+        for k, v in sorted(dataset_mgr.local.items(), key=itemgetter(0)):
             print("{}: {}".format(k, v))
+    dataset_db.save()
 
 
 def main():

@@ -36,13 +36,13 @@ class _AD9xxx(Module):
         ftws = [Signal(32) for i in range(nchannels)]
         for c, ftw in enumerate(ftws):
             if flen(pads.d) == 8:
-                self.sync.rio += \
+                self.sync.rio_phy += \
                     If(selected(c), [
                         If(current_address == ftw_base+i,
                            ftw[i*8:(i+1)*8].eq(current_data))
                         for i in range(4)])
             elif flen(pads.d) == 16:
-                self.sync.rio += \
+                self.sync.rio_phy += \
                     If(selected(c), [
                         If(current_address == ftw_base+2*i,
                            ftw[i*16:(i+1)*16].eq(current_data))
@@ -51,7 +51,7 @@ class _AD9xxx(Module):
                 raise NotImplementedError
 
         # FTW to probe on FUD
-        self.sync.rio += If(current_address == 2**flen(pads.a), [
+        self.sync.rio_phy += If(current_address == 2**flen(pads.a), [
             If(selected(c), probe.eq(ftw))
             for c, (probe, ftw) in enumerate(zip(self.probes, ftws))])

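The gateware change moves the FTW shadow registers and the probe update from the rio clock domain to rio_phy, so they are registered in the same domain that drives the DDS bus pads. A minimal Migen sketch of a registered assignment in a named clock domain (import path per the Migen of that era; assumed):

    from migen.fhdl.std import *

    class Probe(Module):
        def __init__(self):
            ftw = Signal(32)
            self.probe = Signal(32)
            # registered in the rio_phy clock domain, as in the diff above
            self.sync.rio_phy += self.probe.eq(ftw)
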
@@ -5,19 +5,19 @@ _help = """
 This is an interactive Python console.

 The following functions are available:
-    get_parameter(key)
-    set_parameter(key, value) [asynchronous update]
-    get_result(key) [real-time results only]
+    get_dataset(key)
+    set_dataset(key, value, persist=False) [asynchronous update]
+    del_dataset(key) [asynchronous update]

 """


 class ConsoleDock(dockarea.Dock):
-    def __init__(self, get_parameter, set_parameter, get_result):
+    def __init__(self, get_dataset, set_dataset, del_dataset):
         dockarea.Dock.__init__(self, "Console", size=(1000, 300))
         ns = {
-            "get_parameter": get_parameter,
-            "set_parameter": set_parameter,
-            "get_result": get_result
+            "get_dataset": get_dataset,
+            "set_dataset": set_dataset,
+            "del_dataset": del_dataset
         }
         c = console.ConsoleWidget(namespace=ns, text=_help)
         self.addWidget(c)
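Note: the console namespace now exposes dataset accessors instead of parameter/result ones. A self-contained sketch of the wiring, with stub callables standing in for the dock's real accessors (signatures taken from the _help text above):

    datasets = {}

    def get_dataset(key):
        return datasets[key]

    def set_dataset(key, value, persist=False):
        datasets[key] = value

    def del_dataset(key):
        del datasets[key]

    ns = {"get_dataset": get_dataset,
          "set_dataset": set_dataset,
          "del_dataset": del_dataset}

    ns["set_dataset"]("flopping_f", 1500.02, persist=True)
    print(ns["get_dataset"]("flopping_f"))  # 1500.02
    ns["del_dataset"]("flopping_f")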
@@ -8,16 +8,17 @@ from pyqtgraph import dockarea
 from pyqtgraph import LayoutWidget

 from artiq.protocols.sync_struct import Subscriber
-from artiq.gui.tools import DictSyncModel, short_format
+from artiq.tools import short_format
+from artiq.gui.tools import DictSyncModel
 from artiq.gui.displays import *


 logger = logging.getLogger(__name__)


-class ResultsModel(DictSyncModel):
+class DatasetsModel(DictSyncModel):
     def __init__(self, parent, init):
-        DictSyncModel.__init__(self, ["Result", "Value"],
+        DictSyncModel.__init__(self, ["Dataset", "Persistent", "Value"],
                                parent, init)

     def sort_key(self, k, v):
@@ -27,7 +28,9 @@ class ResultsModel(DictSyncModel):
         if column == 0:
             return k
         elif column == 1:
-            return short_format(v)
+            return "Y" if v[0] else "N"
+        elif column == 2:
+            return short_format(v[1])
         else:
             raise ValueError

@@ -38,23 +41,28 @@ def _get_display_type_name(display_cls):
     return name


-class ResultsDock(dockarea.Dock):
+class DatasetsDock(dockarea.Dock):
     def __init__(self, dialog_parent, dock_area):
-        dockarea.Dock.__init__(self, "Results", size=(1500, 500))
+        dockarea.Dock.__init__(self, "Datasets", size=(1500, 500))
         self.dialog_parent = dialog_parent
         self.dock_area = dock_area

         grid = LayoutWidget()
         self.addWidget(grid)

+        self.search = QtGui.QLineEdit()
+        self.search.setPlaceholderText("search...")
+        self.search.editingFinished.connect(self._search_datasets)
+        grid.addWidget(self.search, 0, 0)
+
         self.table = QtGui.QTableView()
         self.table.setSelectionMode(QtGui.QAbstractItemView.NoSelection)
         self.table.horizontalHeader().setResizeMode(
             QtGui.QHeaderView.ResizeToContents)
-        grid.addWidget(self.table, 0, 0)
+        grid.addWidget(self.table, 1, 0)

         add_display_box = QtGui.QGroupBox("Add display")
-        grid.addWidget(add_display_box, 0, 1)
+        grid.addWidget(add_display_box, 1, 1)
         display_grid = QtGui.QGridLayout()
         add_display_box.setLayout(display_grid)

@@ -65,24 +73,37 @@ class ResultsDock(dockarea.Dock):

         self.displays = dict()

-    def get_result(self, key):
-        return self.table_model.backing_store[key]
+    def _search_datasets(self):
+        model = self.table_model
+        search = self.search.displayText()
+        for row in range(model.rowCount(model.index(0, 0))):
+            index = model.index(row, 0)
+            dataset = model.data(index, QtCore.Qt.DisplayRole)
+            if search in dataset:
+                self.table.showRow(row)
+            else:
+                self.table.hideRow(row)

-    @asyncio.coroutine
-    def sub_connect(self, host, port):
-        self.subscriber = Subscriber("rt_results", self.init_results_model,
+    def get_dataset(self, key):
+        return self.table_model.backing_store[key][1]
+
+    async def sub_connect(self, host, port):
+        self.subscriber = Subscriber("datasets", self.init_datasets_model,
                                      self.on_mod)
-        yield from self.subscriber.connect(host, port)
+        await self.subscriber.connect(host, port)

-    @asyncio.coroutine
-    def sub_close(self):
-        yield from self.subscriber.close()
+    async def sub_close(self):
+        await self.subscriber.close()

-    def init_results_model(self, init):
-        self.table_model = ResultsModel(self.table, init)
+    def init_datasets_model(self, init):
+        self.table_model = DatasetsModel(self.table, init)
         self.table.setModel(self.table_model)
         return self.table_model

+    def update_display_data(self, dsp):
+        dsp.update_data({k: self.table_model.backing_store[k][1]
+                         for k in dsp.data_sources()})
+
     def on_mod(self, mod):
         if mod["action"] == "init":
             for display in self.displays.values():
@@ -98,7 +119,7 @@ class ResultsDock(dockarea.Dock):

         for display in self.displays.values():
             if source in display.data_sources():
-                display.update_data(self.table_model.backing_store)
+                self.update_display_data(display)

     def create_dialog(self, ty):
         dlg_class = display_types[ty][0]
@@ -113,7 +134,7 @@ class ResultsDock(dockarea.Dock):
         dsp_class = display_types[ty][1]
         dsp = dsp_class(name, settings)
         self.displays[name] = dsp
-        dsp.update_data(self.table_model.backing_store)
+        self.update_display_data(dsp)

         def on_close():
             del self.displays[name]
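Note: the model's backing store now maps each key to a (persist, value) pair, which is why get_dataset() and update_display_data() index with [1] and the table gains a Persistent column. A stand-alone sketch of how the three columns derive from the store (entries are made up):

    backing_store = {"flopping_f": (True, 1500.02), "count": (False, 100)}

    for k, (persist, value) in sorted(backing_store.items()):
        # columns: Dataset, Persistent, Value
        print(k, "Y" if persist else "N", value)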
@@ -6,7 +6,7 @@ from pyqtgraph import LayoutWidget

 from artiq.protocols.sync_struct import Subscriber
 from artiq.protocols import pyon
-from artiq.gui.tools import DictSyncModel
+from artiq.gui.tools import si_prefix, DictSyncModel
 from artiq.gui.scan import ScanController


@@ -74,26 +74,28 @@ class _EnumerationEntry(QtGui.QComboBox):
 class _NumberEntry(QtGui.QDoubleSpinBox):
     def __init__(self, procdesc):
         QtGui.QDoubleSpinBox.__init__(self)
+        self.scale = procdesc["scale"]
         self.setDecimals(procdesc["ndecimals"])
-        self.setSingleStep(procdesc["step"])
+        self.setSingleStep(procdesc["step"]/self.scale)
         if procdesc["min"] is not None:
-            self.setMinimum(procdesc["min"])
+            self.setMinimum(procdesc["min"]/self.scale)
         else:
             self.setMinimum(float("-inf"))
         if procdesc["max"] is not None:
-            self.setMaximum(procdesc["max"])
+            self.setMaximum(procdesc["max"]/self.scale)
         else:
             self.setMaximum(float("inf"))
-        if procdesc["unit"]:
-            self.setSuffix(" " + procdesc["unit"])
+        suffix = si_prefix(self.scale) + procdesc["unit"]
+        if suffix:
+            self.setSuffix(" " + suffix)
         if "default" in procdesc:
             self.set_argument_value(procdesc["default"])

     def get_argument_value(self):
-        return self.value()
+        return self.value()*self.scale

     def set_argument_value(self, value):
-        self.setValue(value)
+        self.setValue(value/self.scale)


 class _StringEntry(QtGui.QLineEdit):
@@ -300,23 +302,20 @@ class ExplorerDock(dockarea.Dock):
     def enable_duedate(self):
         self.datetime_en.setChecked(True)

-    @asyncio.coroutine
-    def sub_connect(self, host, port):
+    async def sub_connect(self, host, port):
         self.explist_subscriber = Subscriber("explist",
                                              self.init_explist_model)
-        yield from self.explist_subscriber.connect(host, port)
+        await self.explist_subscriber.connect(host, port)

-    @asyncio.coroutine
-    def sub_close(self):
-        yield from self.explist_subscriber.close()
+    async def sub_close(self):
+        await self.explist_subscriber.close()

     def init_explist_model(self, init):
         self.explist_model = _ExplistModel(self, self.el, init)
         self.el.setModel(self.explist_model)
         return self.explist_model

-    @asyncio.coroutine
-    def submit(self, pipeline_name, file, class_name, arguments,
-               priority, due_date, flush):
+    async def submit(self, pipeline_name, file, class_name, arguments,
+                     priority, due_date, flush):
         expid = {
             "repo_rev": None,
@@ -324,8 +323,8 @@ class ExplorerDock(dockarea.Dock):
             "class_name": class_name,
             "arguments": arguments,
         }
-        rid = yield from self.schedule_ctl.submit(pipeline_name, expid,
-                                                  priority, due_date, flush)
+        rid = await self.schedule_ctl.submit(pipeline_name, expid,
+                                             priority, due_date, flush)
         self.status_bar.showMessage("Submitted RID {}".format(rid))

     def submit_clicked(self):
@@ -338,7 +337,10 @@ class ExplorerDock(dockarea.Dock):
         arguments = self.argeditor.get_argument_values(True)
         if arguments is None:
             return
-        asyncio.async(self.submit(self.pipeline.text(),
-                                  expinfo["file"], expinfo["class_name"],
-                                  arguments, self.priority.value(),
-                                  due_date, self.flush.isChecked()))
+        asyncio.ensure_future(self.submit(self.pipeline.text(),
+                                          expinfo["file"],
+                                          expinfo["class_name"],
+                                          arguments,
+                                          self.priority.value(),
+                                          due_date,
+                                          self.flush.isChecked()))
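Note: asyncio.async() is replaced by asyncio.ensure_future() throughout (async() collides with the new async keyword and was deprecated), and @asyncio.coroutine with yield from gives way to Python 3.5 async/await. A self-contained demo of the pattern:

    import asyncio

    async def submit(name):
        await asyncio.sleep(0)
        return "Submitted {}".format(name)

    async def main():
        # schedule without awaiting immediately, as the GUI slots do
        task = asyncio.ensure_future(submit("exp"))
        print(await task)

    asyncio.get_event_loop().run_until_complete(main())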
@@ -41,14 +41,12 @@ class LogDock(dockarea.Dock):
         self.addWidget(self.log)
         self.scroll_at_bottom = False

-    @asyncio.coroutine
-    def sub_connect(self, host, port):
+    async def sub_connect(self, host, port):
         self.subscriber = Subscriber("log", self.init_log_model)
-        yield from self.subscriber.connect(host, port)
+        await self.subscriber.connect(host, port)

-    @asyncio.coroutine
-    def sub_close(self):
-        yield from self.subscriber.close()
+    async def sub_close(self):
+        await self.subscriber.close()

     def rows_inserted_before(self):
         scrollbar = self.log.verticalScrollBar()
@@ -23,9 +23,9 @@ _mode_enc = {


 class _TTLWidget(QtGui.QFrame):
-    def __init__(self, send_to_device, channel, force_out, title):
-        self.send_to_device = send_to_device
+    def __init__(self, channel, send_to_device, force_out, title):
         self.channel = channel
+        self.send_to_device = send_to_device
         self.force_out = force_out

         QtGui.QFrame.__init__(self)
@@ -119,7 +119,8 @@ class _TTLWidget(QtGui.QFrame):


 class _DDSWidget(QtGui.QFrame):
-    def __init__(self, sysclk, title):
+    def __init__(self, channel, sysclk, title):
+        self.channel = channel
         self.sysclk = sysclk

         QtGui.QFrame.__init__(self)
@@ -163,9 +164,11 @@ class _DeviceManager:
             self[k] = v

     def __setitem__(self, k, v):
-        self.ddb[k] = v
         if k in self.ttl_widgets:
             del self[k]
+        if k in self.dds_widgets:
+            del self[k]
+        self.ddb[k] = v
         if not isinstance(v, dict):
             return
         try:
@@ -176,14 +179,15 @@ class _DeviceManager:
             if v["module"] == "artiq.coredevice.ttl":
                 channel = v["arguments"]["channel"]
                 force_out = v["class"] == "TTLOut"
-                self.ttl_widgets[channel] = _TTLWidget(
-                    self.send_to_device, channel, force_out, title)
+                self.ttl_widgets[k] = _TTLWidget(
+                    channel, self.send_to_device, force_out, title)
                 self.ttl_cb()
             if (v["module"] == "artiq.coredevice.dds"
                     and v["class"] in {"AD9858", "AD9914"}):
                 channel = v["arguments"]["channel"]
                 sysclk = v["arguments"]["sysclk"]
-                self.dds_widgets[channel] = _DDSWidget(sysclk, title)
+                self.dds_widgets[channel] = _DDSWidget(
+                    channel, sysclk, title)
                 self.dds_cb()
         except KeyError:
             pass
@@ -191,8 +195,13 @@ class _DeviceManager:
     def __delitem__(self, k):
         del self.ddb[k]
         if k in self.ttl_widgets:
+            self.ttl_widgets[k].deleteLater()
             del self.ttl_widgets[k]
             self.ttl_cb()
+        if k in self.dds_widgets:
+            self.dds_widgets[k].deleteLater()
+            del self.dds_widgets[k]
+            self.dds_cb()

     def get_core_addr(self):
         try:
@@ -232,26 +241,24 @@ class MonInj(TaskObject):
         self.dm = _DeviceManager(self.send_to_device, dict())
         self.transport = None

-    @asyncio.coroutine
-    def start(self, server, port):
+    async def start(self, server, port):
         loop = asyncio.get_event_loop()
-        yield from loop.create_datagram_endpoint(lambda: self,
-                                                 family=socket.AF_INET)
+        await loop.create_datagram_endpoint(lambda: self,
+                                            family=socket.AF_INET)
         try:
-            yield from self.subscriber.connect(server, port)
+            await self.subscriber.connect(server, port)
             try:
                 TaskObject.start(self)
             except:
-                yield from self.subscriber.close()
+                await self.subscriber.close()
                 raise
         except:
             self.transport.close()
             raise

-    @asyncio.coroutine
-    def stop(self):
-        yield from TaskObject.stop(self)
-        yield from self.subscriber.close()
+    async def stop(self):
+        await TaskObject.stop(self)
+        await self.subscriber.close()
         if self.transport is not None:
             self.transport.close()
             self.transport = None
@@ -263,16 +270,17 @@ class MonInj(TaskObject):
         try:
             ttl_levels, ttl_oes, ttl_overrides = \
                 struct.unpack(">QQQ", data[:8*3])
-            for channel, w in self.dm.ttl_widgets.items():
+            for w in self.dm.ttl_widgets.values():
+                channel = w.channel
                 w.set_value(ttl_levels & (1 << channel),
                             ttl_oes & (1 << channel),
                             ttl_overrides & (1 << channel))
             dds_data = data[8*3:]
             ndds = len(dds_data)//4
             ftws = struct.unpack(">" + "I"*ndds, dds_data)
-            for channel, w in self.dm.dds_widgets.items():
+            for w in self.dm.dds_widgets.values():
                 try:
-                    ftw = ftws[channel]
+                    ftw = ftws[w.channel]
                 except KeyError:
                     pass
                 else:
@@ -295,10 +303,9 @@ class MonInj(TaskObject):
             else:
                 self.transport.sendto(data, (ca, 3250))

-    @asyncio.coroutine
-    def _do(self):
+    async def _do(self):
         while True:
-            yield from asyncio.sleep(0.2)
+            await asyncio.sleep(0.2)
             # MONINJ_REQ_MONITOR
             self.send_to_device(b"\x01")
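Note: widgets now carry their own channel number, so the datagram parser iterates over values() instead of keying the dicts by channel. A sketch of the moninj packet layout consumed above: three big-endian 64-bit words (TTL levels, output enables, overrides) followed by one 32-bit FTW per DDS channel (sample values are made up):

    import struct

    data = (struct.pack(">QQQ", 0b1010, 0b1111, 0b0000)
            + struct.pack(">II", 42, 7))

    ttl_levels, ttl_oes, ttl_overrides = struct.unpack(">QQQ", data[:8*3])
    dds_data = data[8*3:]
    ndds = len(dds_data)//4
    ftws = struct.unpack(">" + "I"*ndds, dds_data)

    channel = 1
    print(bool(ttl_levels & (1 << channel)), ftws)  # True (42, 7)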
@@ -1,74 +0,0 @@
-import asyncio
-
-from quamash import QtGui, QtCore
-from pyqtgraph import dockarea
-from pyqtgraph import LayoutWidget
-
-from artiq.protocols.sync_struct import Subscriber
-from artiq.gui.tools import DictSyncModel, short_format
-
-
-class ParametersModel(DictSyncModel):
-    def __init__(self, parent, init):
-        DictSyncModel.__init__(self, ["Parameter", "Value"],
-                               parent, init)
-
-    def sort_key(self, k, v):
-        return k
-
-    def convert(self, k, v, column):
-        if column == 0:
-            return k
-        elif column == 1:
-            return short_format(v)
-        else:
-            raise ValueError
-
-
-class ParametersDock(dockarea.Dock):
-    def __init__(self):
-        dockarea.Dock.__init__(self, "Parameters", size=(400, 300))
-
-        grid = LayoutWidget()
-        self.addWidget(grid)
-
-        self.search = QtGui.QLineEdit()
-        self.search.setPlaceholderText("search...")
-        self.search.editingFinished.connect(self._search_parameters)
-        grid.addWidget(self.search, 0, 0)
-
-        self.table = QtGui.QTableView()
-        self.table.setSelectionMode(QtGui.QAbstractItemView.NoSelection)
-        self.table.horizontalHeader().setResizeMode(
-            QtGui.QHeaderView.ResizeToContents)
-        grid.addWidget(self.table, 1, 0)
-
-    def get_parameter(self, key):
-        return self.table_model.backing_store[key]
-
-    def _search_parameters(self):
-        model = self.table.model()
-        parentIndex = model.index(0, 0)
-        numRows = model.rowCount(parentIndex)
-
-        for row in range(numRows):
-            index = model.index(row, 0)
-            parameter = model.data(index, QtCore.Qt.DisplayRole)
-            if parameter.startswith(self.search.displayText()):
-                self.table.showRow(row)
-            else:
-                self.table.hideRow(row)
-
-    @asyncio.coroutine
-    def sub_connect(self, host, port):
-        self.subscriber = Subscriber("parameters", self.init_parameters_model)
-        yield from self.subscriber.connect(host, port)
-
-    @asyncio.coroutine
-    def sub_close(self):
-        yield from self.subscriber.close()
-
-    def init_parameters_model(self, init):
-        self.table_model = ParametersModel(self.table, init)
-        self.table.setModel(self.table_model)
-        return self.table_model
@@ -1,25 +1,28 @@
 from quamash import QtGui
 from pyqtgraph import LayoutWidget

+from artiq.gui.tools import si_prefix
+

 class _Range(LayoutWidget):
-    def __init__(self, global_min, global_max, global_step, unit, ndecimals):
+    def __init__(self, global_min, global_max, global_step, suffix, scale, ndecimals):
         LayoutWidget.__init__(self)

+        self.scale = scale
         def apply_properties(spinbox):
             spinbox.setDecimals(ndecimals)
             if global_min is not None:
-                spinbox.setMinimum(global_min)
+                spinbox.setMinimum(global_min/self.scale)
             else:
                 spinbox.setMinimum(float("-inf"))
             if global_max is not None:
-                spinbox.setMaximum(global_max)
+                spinbox.setMaximum(global_max/self.scale)
             else:
                 spinbox.setMaximum(float("inf"))
             if global_step is not None:
-                spinbox.setSingleStep(global_step)
-            if unit:
-                spinbox.setSuffix(" " + unit)
+                spinbox.setSingleStep(global_step/self.scale)
+            if suffix:
+                spinbox.setSuffix(" " + suffix)

         self.addWidget(QtGui.QLabel("Min:"), 0, 0)
         self.min = QtGui.QDoubleSpinBox()
@@ -38,8 +41,8 @@ class _Range(LayoutWidget):
         self.addWidget(self.npoints, 0, 5)

     def set_values(self, min, max, npoints):
-        self.min.setValue(min)
-        self.max.setValue(max)
+        self.min.setValue(min/self.scale)
+        self.max.setValue(max/self.scale)
         self.npoints.setValue(npoints)

     def get_values(self):
@@ -48,8 +51,8 @@ class _Range(LayoutWidget):
         if min > max:
             raise ValueError("Minimum scan boundary must be less than maximum")
         return {
-            "min": min,
-            "max": max,
+            "min": min*self.scale,
+            "max": max*self.scale,
             "npoints": self.npoints.value()
         }

@@ -61,33 +64,35 @@ class ScanController(LayoutWidget):
         self.stack = QtGui.QStackedWidget()
         self.addWidget(self.stack, 1, 0, colspan=4)

+        self.scale = procdesc["scale"]
+
         gmin, gmax = procdesc["global_min"], procdesc["global_max"]
         gstep = procdesc["global_step"]
-        unit = procdesc["unit"]
+        suffix = si_prefix(self.scale) + procdesc["unit"]
         ndecimals = procdesc["ndecimals"]

         self.v_noscan = QtGui.QDoubleSpinBox()
         self.v_noscan.setDecimals(ndecimals)
         if gmin is not None:
-            self.v_noscan.setMinimum(gmin)
+            self.v_noscan.setMinimum(gmin/self.scale)
         else:
             self.v_noscan.setMinimum(float("-inf"))
         if gmax is not None:
-            self.v_noscan.setMaximum(gmax)
+            self.v_noscan.setMaximum(gmax/self.scale)
         else:
             self.v_noscan.setMaximum(float("inf"))
-        self.v_noscan.setSingleStep(gstep)
-        if unit:
-            self.v_noscan.setSuffix(" " + unit)
+        self.v_noscan.setSingleStep(gstep/self.scale)
+        if suffix:
+            self.v_noscan.setSuffix(" " + suffix)
         self.v_noscan_gr = LayoutWidget()
         self.v_noscan_gr.addWidget(QtGui.QLabel("Value:"), 0, 0)
         self.v_noscan_gr.addWidget(self.v_noscan, 0, 1)
         self.stack.addWidget(self.v_noscan_gr)

-        self.v_linear = _Range(gmin, gmax, gstep, unit, ndecimals)
+        self.v_linear = _Range(gmin, gmax, gstep, suffix, self.scale, ndecimals)
         self.stack.addWidget(self.v_linear)

-        self.v_random = _Range(gmin, gmax, gstep, unit, ndecimals)
+        self.v_random = _Range(gmin, gmax, gstep, suffix, self.scale, ndecimals)
         self.stack.addWidget(self.v_random)

         self.v_explicit = QtGui.QLineEdit()
@@ -124,7 +129,7 @@ class ScanController(LayoutWidget):

     def get_argument_value(self):
         if self.noscan.isChecked():
-            return {"ty": "NoScan", "value": self.v_noscan.value()}
+            return {"ty": "NoScan", "value": self.v_noscan.value()*self.scale}
         elif self.linear.isChecked():
             d = {"ty": "LinearScan"}
             d.update(self.v_linear.get_values())
@@ -140,7 +145,7 @@ class ScanController(LayoutWidget):
     def set_argument_value(self, d):
         if d["ty"] == "NoScan":
             self.noscan.setChecked(True)
-            self.v_noscan.setValue(d["value"])
+            self.v_noscan.setValue(d["value"]/self.scale)
         elif d["ty"] == "LinearScan":
             self.linear.setChecked(True)
             self.v_linear.set_values(d["min"], d["max"], d["npoints"])
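Note: the GUI now divides by scale on display and multiplies back on readback, so stored values stay in SI base units. The round trip, in short (the MHz example is illustrative):

    scale = 1e6            # frequency argument displayed in MHz
    stored = 1.5e6         # value kept in base units (Hz)

    displayed = stored/scale           # 1.5, rendered as "1.50 MHz"
    assert displayed*scale == stored   # what get_values() hands back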
@@ -1,11 +1,13 @@
 import asyncio
 import time
+from functools import partial

 from quamash import QtGui, QtCore
 from pyqtgraph import dockarea

 from artiq.protocols.sync_struct import Subscriber
-from artiq.gui.tools import elide, DictSyncModel
+from artiq.gui.tools import DictSyncModel
+from artiq.tools import elide


 class _ScheduleModel(DictSyncModel):
@@ -71,32 +73,36 @@ class ScheduleDock(dockarea.Dock):
         self.addWidget(self.table)

         self.table.setContextMenuPolicy(QtCore.Qt.ActionsContextMenu)
+        request_termination_action = QtGui.QAction("Request termination", self.table)
+        request_termination_action.triggered.connect(partial(self.delete_clicked, True))
+        self.table.addAction(request_termination_action)
         delete_action = QtGui.QAction("Delete", self.table)
-        delete_action.triggered.connect(self.delete_clicked)
+        delete_action.triggered.connect(partial(self.delete_clicked, False))
         self.table.addAction(delete_action)

-    @asyncio.coroutine
-    def sub_connect(self, host, port):
+    async def sub_connect(self, host, port):
         self.subscriber = Subscriber("schedule", self.init_schedule_model)
-        yield from self.subscriber.connect(host, port)
+        await self.subscriber.connect(host, port)

-    @asyncio.coroutine
-    def sub_close(self):
-        yield from self.subscriber.close()
+    async def sub_close(self):
+        await self.subscriber.close()

     def init_schedule_model(self, init):
         self.table_model = _ScheduleModel(self.table, init)
         self.table.setModel(self.table_model)
         return self.table_model

-    @asyncio.coroutine
-    def delete(self, rid):
-        yield from self.schedule_ctl.delete(rid)
+    async def delete(self, rid, graceful):
+        if graceful:
+            await self.schedule_ctl.request_termination(rid)
+        else:
+            await self.schedule_ctl.delete(rid)

-    def delete_clicked(self):
+    def delete_clicked(self, graceful):
         idx = self.table.selectedIndexes()
         if idx:
             row = idx[0].row()
             rid = self.table_model.row_to_key[row]
             self.status_bar.showMessage("Deleted RID {}".format(rid))
-            asyncio.async(self.delete(rid))
+            asyncio.ensure_future(self.delete(rid, graceful))
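Note: both context-menu actions share a single slot; functools.partial bakes in the graceful flag that distinguishes "Request termination" from "Delete". A self-contained demo of the idiom:

    from functools import partial

    def delete_clicked(graceful):
        print("request termination" if graceful else "hard delete")

    request_termination = partial(delete_clicked, True)
    hard_delete = partial(delete_clicked, False)
    request_termination()   # request termination
    hard_delete()           # hard delete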
@@ -69,11 +69,10 @@ class StateManager(TaskObject):
                              exc_info=True)
         pyon.store_file(self.filename, data)

-    @asyncio.coroutine
-    def _do(self):
+    async def _do(self):
         try:
             while True:
-                yield from asyncio.sleep(self.autosave_period)
+                await asyncio.sleep(self.autosave_period)
                 self.save()
         finally:
             self.save()
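Note: the try/finally around the autosave loop guarantees one final save when the task is cancelled at shutdown. A runnable demonstration of the pattern:

    import asyncio

    async def autosave(period, save):
        try:
            while True:
                await asyncio.sleep(period)
                save()
        finally:
            save()   # also runs on cancellation

    loop = asyncio.get_event_loop()
    task = asyncio.ensure_future(autosave(0.01, lambda: print("saved")))
    loop.run_until_complete(asyncio.sleep(0.05))
    task.cancel()
    loop.run_until_complete(asyncio.gather(task, return_exceptions=True))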
@@ -2,39 +2,21 @@ from quamash import QtCore
 import numpy as np


-def elide(s, maxlen):
-    elided = False
-    if len(s) > maxlen:
-        s = s[:maxlen]
-        elided = True
-    try:
-        idx = s.index("\n")
-    except ValueError:
-        pass
-    else:
-        s = s[:idx]
-        elided = True
-    if elided:
-        maxlen -= 3
-        if len(s) > maxlen:
-            s = s[:maxlen]
-        s += "..."
-    return s
-
-
-def short_format(v):
-    if v is None:
-        return "None"
-    t = type(v)
-    if np.issubdtype(t, int) or np.issubdtype(t, float):
-        return str(v)
-    elif t is str:
-        return "\"" + elide(v, 15) + "\""
-    else:
-        r = t.__name__
-        if t is list or t is dict or t is set:
-            r += " ({})".format(len(v))
-        return r
+def si_prefix(scale):
+    try:
+        return {
+            1e-12: "p",
+            1e-9: "n",
+            1e-6: "u",
+            1e-3: "m",
+            1.0: "",
+            1e3: "k",
+            1e6: "M",
+            1e9: "G",
+            1e12: "T"
+        }[scale]
+    except KeyError:
+        return "[x{}]".format(scale)


 class _SyncSubstruct:
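Note: si_prefix() maps a numeric scale to its SI prefix letter, with a visible fallback for non-standard scales. Usage (the function is copied here for a stand-alone check):

    def si_prefix(scale):
        try:
            return {1e-12: "p", 1e-9: "n", 1e-6: "u", 1e-3: "m", 1.0: "",
                    1e3: "k", 1e6: "M", 1e9: "G", 1e12: "T"}[scale]
        except KeyError:
            return "[x{}]".format(scale)

    print(si_prefix(1e6) + "Hz")    # MHz
    print(si_prefix(1e-3) + "s")    # ms
    print(si_prefix(42) + "V")      # [x42]V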
@@ -13,7 +13,8 @@ from artiq.coredevice.runtime import source_loader
 __all__ = ["host_int", "int",
            "kernel", "portable", "syscall",
            "set_time_manager", "set_watchdog_factory",
-           "ARTIQException"]
+           "ARTIQException",
+           "TerminationRequested"]

 # global namespace for kernels
 kernel_globals = (
@@ -350,6 +351,11 @@ def watchdog(timeout):
     return _watchdog_factory(timeout)


+class TerminationRequested(Exception):
+    """Raised by ``pause`` when the user has requested termination."""
+    pass
+
+
 class ARTIQException(Exception):
     """Base class for exceptions raised or passed through the core device."""
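Note: TerminationRequested gives experiments a clean way to unwind when the user asks the scheduler to stop them. A sketch of the intended control flow; the pause() stub is hypothetical and stands in for the scheduler-provided call that raises the exception:

    class TerminationRequested(Exception):
        pass

    def pause(termination_requested):
        # stand-in: the real pause() raises when termination was requested
        if termination_requested:
            raise TerminationRequested

    def scan(points, termination_requested=False):
        try:
            for p in points:
                pause(termination_requested)
                print("measuring", p)
        except TerminationRequested:
            print("terminated gracefully")

    scan([1, 2, 3])
    scan([1, 2, 3], termination_requested=True)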
@@ -73,16 +73,22 @@ class NumberValue(_SimpleArgProcessor):

     :param unit: A string representing the unit of the value, for user
         interface (UI) purposes.
+    :param scale: The scale of value for UI purposes. The corresponding SI
+        prefix is shown in front of the unit, and the displayed value is
+        divided by the scale.
     :param step: The step with which the value should be modified by up/down
-        buttons in a UI.
+        buttons in a UI. The default is the scale divided by 10.
     :param min: The minimum value of the argument.
     :param max: The maximum value of the argument.
     :param ndecimals: The number of decimals a UI should use.
     """
-    def __init__(self, default=NoDefault, unit="", step=1.0,
-                 min=None, max=None, ndecimals=2):
+    def __init__(self, default=NoDefault, unit="", scale=1.0,
+                 step=None, min=None, max=None, ndecimals=2):
+        if step is None:
+            step = scale/10.0
         _SimpleArgProcessor.__init__(self, default)
         self.unit = unit
+        self.scale = scale
         self.step = step
         self.min = min
         self.max = max
@@ -91,6 +97,7 @@ class NumberValue(_SimpleArgProcessor):
     def describe(self):
         d = _SimpleArgProcessor.describe(self)
         d["unit"] = self.unit
+        d["scale"] = self.scale
         d["step"] = self.step
         d["min"] = self.min
         d["max"] = self.max
@@ -106,15 +113,13 @@ class StringValue(_SimpleArgProcessor):
 class HasEnvironment:
     """Provides methods to manage the environment of an experiment (devices,
     parameters, results, arguments)."""
-    def __init__(self, dmgr=None, pdb=None, rdb=None, *, parent=None,
-                 param_override=dict(), default_arg_none=False, **kwargs):
+    def __init__(self, device_mgr=None, dataset_mgr=None, *, parent=None,
+                 default_arg_none=False, **kwargs):
         self.requested_args = OrderedDict()

-        self.__dmgr = dmgr
-        self.__pdb = pdb
-        self.__rdb = rdb
+        self.__device_mgr = device_mgr
+        self.__dataset_mgr = dataset_mgr
         self.__parent = parent
-        self.__param_override = param_override
         self.__default_arg_none = default_arg_none

         self.__kwargs = kwargs
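Note: per the docstring above, a NumberValue stores SI base units while the GUI divides by scale for display, and step defaults to scale/10. A short sketch, assuming the module path of this tree:

    from artiq.language.environment import NumberValue

    freq = NumberValue(default=1.5e6, unit="Hz", scale=1e6)
    d = freq.describe()
    print(d["scale"], d["step"])   # 1000000.0 100000.0 (step = scale/10)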
@@ -136,17 +141,16 @@ class HasEnvironment:
         are set to ``None``."""
         raise NotImplementedError

-    def dbs(self):
-        """Returns the device manager, the parameter database and the result
-        database, in this order.
+    def managers(self):
+        """Returns the device manager and the dataset manager, in this order.

         This is the same order that the constructor takes them, allowing
         sub-objects to be created with this idiom to pass the environment
         around: ::

-            sub_object = SomeLibrary(*self.dbs())
+            sub_object = SomeLibrary(*self.managers())
         """
-        return self.__dmgr, self.__pdb, self.__rdb
+        return self.__device_mgr, self.__dataset_mgr

     def get_argument(self, key, processor=None, group=None):
         """Retrieves and returns the value of an argument.
@@ -177,94 +181,54 @@ class HasEnvironment:
             raise
         return processor.process(argval)

-    def attr_argument(self, key, processor=None, group=None):
+    def setattr_argument(self, key, processor=None, group=None):
         """Sets an argument as attribute. The names of the argument and of the
         attribute are the same."""
         setattr(self, key, self.get_argument(key, processor, group))

+    def get_device_db(self):
+        """Returns the full contents of the device database."""
+        if self.__parent is not None:
+            return self.__parent.get_device_db()
+        return self.__device_mgr.get_device_db()
+
     def get_device(self, key):
         """Creates and returns a device driver."""
         if self.__parent is not None:
             return self.__parent.get_device(key)
-        if self.__dmgr is None:
+        if self.__device_mgr is None:
             raise ValueError("Device manager not present")
-        return self.__dmgr.get(key)
+        return self.__device_mgr.get(key)

-    def attr_device(self, key):
+    def setattr_device(self, key):
         """Sets a device driver as attribute. The names of the device driver
         and of the attribute are the same."""
         setattr(self, key, self.get_device(key))

-    def get_parameter(self, key, default=NoDefault):
-        """Retrieves and returns a parameter."""
-        if self.__parent is not None and key not in self.__param_override:
-            return self.__parent.get_parameter(key, default)
-        if self.__pdb is None:
-            raise ValueError("Parameter database not present")
-        if key in self.__param_override:
-            return self.__param_override[key]
+    def set_dataset(self, key, value,
+                    broadcast=False, persist=False, save=True):
+        if self.__parent is not None:
+            self.__parent.set_dataset(key, value, broadcast, persist, save)
+            return
+        if self.__dataset_mgr is None:
+            raise ValueError("Dataset manager not present")
+        return self.__dataset_mgr.set(key, value, broadcast, persist, save)
+
+    def get_dataset(self, key, default=NoDefault):
+        if self.__parent is not None:
+            return self.__parent.get_dataset(key, default)
+        if self.__dataset_mgr is None:
+            raise ValueError("Dataset manager not present")
         try:
-            return self.__pdb.get(key)
+            return self.__dataset_mgr.get(key)
         except KeyError:
-            if default is not NoDefault:
-                return default
-            else:
+            if default is NoDefault:
                 raise
+            else:
+                return default

-    def attr_parameter(self, key, default=NoDefault):
-        """Sets a parameter as attribute. The names of the argument and of the
-        parameter are the same."""
-        setattr(self, key, self.get_parameter(key, default))
-
-    def set_parameter(self, key, value):
-        """Writes the value of a parameter into the parameter database."""
-        if self.__parent is not None:
-            self.__parent.set_parameter(key, value)
-            return
-        if self.__pdb is None:
-            raise ValueError("Parameter database not present")
-        self.__pdb.set(key, value)
-
-    def set_result(self, key, value, realtime=False, store=True):
-        """Writes the value of a result.
-
-        :param realtime: Marks the result as real-time, making it immediately
-            available to clients such as the user interface. Returns a
-            ``Notifier`` instance that can be used to modify mutable results
-            (such as lists) and synchronize the modifications with the clients.
-        :param store: Defines if the result should be stored permanently,
-            e.g. in HDF5 output. Default is to store.
-        """
-        if self.__parent is not None:
-            self.__parent.set_result(key, value, realtime, store)
-            return
-        if self.__rdb is None:
-            raise ValueError("Result database not present")
-        if realtime:
-            if key in self.__rdb.nrt:
-                raise ValueError("Result is already non-realtime")
-            self.__rdb.rt[key] = value
-            notifier = self.__rdb.rt[key]
-            notifier.kernel_attr_init = False
-            self.__rdb.set_store(key, store)
-            return notifier
-        else:
-            if key in self.__rdb.rt.read:
-                raise ValueError("Result is already realtime")
-            self.__rdb.nrt[key] = value
-            self.__rdb.set_store(key, store)
-
-    def get_result(self, key):
-        """Retrieves the value of a result.
-
-        There is no difference between real-time and non-real-time results
-        (this function does not return ``Notifier`` instances).
-        """
-        if self.__parent is not None:
-            return self.__parent.get_result(key)
-        if self.__rdb is None:
-            raise ValueError("Result database not present")
-        return self.__rdb.get(key)
+    def setattr_dataset(self, key, default=NoDefault):
+        setattr(self, key, self.get_dataset(key, default))


 class Experiment:
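Note: get_dataset() now falls back to the caller-supplied default only when one was given; otherwise the KeyError propagates. The restructured logic, extracted for a stand-alone check (the NoDefault sentinel mirrors the one in this module):

    class NoDefault:
        pass

    def get_dataset(store, key, default=NoDefault):
        try:
            return store[key]
        except KeyError:
            if default is NoDefault:
                raise
            else:
                return default

    store = {"x": 1}
    print(get_dataset(store, "x"))       # 1
    print(get_dataset(store, "y", 0))    # 0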
@@ -47,6 +47,9 @@ class NoScan(ScanObject):
     def __iter__(self):
         return self._gen()

+    def __len__(self):
+        return 1
+
     def describe(self):
         return {"ty": "NoScan", "value": self.value}

@@ -70,6 +73,9 @@ class LinearScan(ScanObject):
     def __iter__(self):
         return self._gen()

+    def __len__(self):
+        return self.npoints
+
     def describe(self):
         return {"ty": "LinearScan",
                 "min": self.min, "max": self.max, "npoints": self.npoints}
@@ -79,6 +85,9 @@ class RandomScan(ScanObject):
     """A scan object that yields a fixed number of randomly ordered evenly
     spaced values in a range."""
     def __init__(self, min, max, npoints, seed=0):
+        self.min = min
+        self.max = max
+        self.npoints = npoints
         self.sequence = list(LinearScan(min, max, npoints))
         shuffle(self.sequence, Random(seed).random)

@@ -86,6 +95,9 @@ class RandomScan(ScanObject):
     def __iter__(self):
         return iter(self.sequence)

+    def __len__(self):
+        return self.npoints
+
     def describe(self):
         return {"ty": "RandomScan",
                 "min": self.min, "max": self.max, "npoints": self.npoints}
@@ -100,6 +112,9 @@ class ExplicitScan(ScanObject):
     def __iter__(self):
        return iter(self.sequence)

+    def __len__(self):
+        return len(self.sequence)
+
     def describe(self):
         return {"ty": "ExplicitScan", "sequence": self.sequence}

@@ -121,17 +136,24 @@ class Scannable:
         range of its input widgets.
     :param global_max: Same as global_min, but for the maximum value.
     :param global_step: The step with which the value should be modified by
-        up/down buttons in a user interface.
+        up/down buttons in a user interface. The default is the scale divided
+        by 10.
     :param unit: A string representing the unit of the scanned variable, for user
         interface (UI) purposes.
+    :param scale: The scale of value for UI purposes. The corresponding SI
+        prefix is shown in front of the unit, and the displayed value is
+        divided by the scale.
     :param ndecimals: The number of decimals a UI should use.
     """
-    def __init__(self, default=NoDefault, unit="",
-                 global_step=1.0, global_min=None, global_max=None,
+    def __init__(self, default=NoDefault, unit="", scale=1.0,
+                 global_step=None, global_min=None, global_max=None,
                  ndecimals=2):
+        if global_step is None:
+            global_step = scale/10.0
         if default is not NoDefault:
             self.default_value = default
         self.unit = unit
+        self.scale = scale
         self.global_step = global_step
         self.global_min = global_min
         self.global_max = global_max
@@ -155,6 +177,7 @@ class Scannable:
         if hasattr(self, "default_value"):
             d["default"] = self.default_value.describe()
         d["unit"] = self.unit
+        d["scale"] = self.scale
         d["global_step"] = self.global_step
         d["global_min"] = self.global_min
         d["global_max"] = self.global_max
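Note: the new __len__ methods let clients size a scan (e.g. for progress reporting) without consuming its iterator. A stand-alone sketch of the RandomScan construction, where the shuffled sequence keeps the LinearScan length:

    from random import Random, shuffle

    def linear(min, max, npoints):
        step = (max - min)/(npoints - 1)
        return [min + i*step for i in range(npoints)]

    sequence = linear(0.0, 1.0, 5)
    shuffle(sequence, Random(0).random)   # deterministic order, seed=0
    print(len(sequence), sequence)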
@@ -0,0 +1,62 @@
+import asyncio
+
+from artiq.protocols.sync_struct import Notifier, process_mod
+from artiq.protocols import pyon
+from artiq.tools import TaskObject
+
+
+class DeviceDB:
+    def __init__(self, backing_file):
+        self.backing_file = backing_file
+        self.data = Notifier(pyon.load_file(self.backing_file))
+
+    def scan(self):
+        new_data = pyon.load_file(self.backing_file)
+
+        for k in list(self.data.read.keys()):
+            if k not in new_data:
+                del self.data[k]
+        for k in new_data.keys():
+            if k not in self.data.read or self.data.read[k] != new_data[k]:
+                self.data[k] = new_data[k]
+
+    def get_device_db(self):
+        return self.data.read
+
+    def get(self, key):
+        return self.data.read[key]
+
+
+class DatasetDB(TaskObject):
+    def __init__(self, persist_file, autosave_period=30):
+        self.persist_file = persist_file
+        self.autosave_period = autosave_period
+
+        file_data = pyon.load_file(self.persist_file)
+        self.data = Notifier({k: (True, v) for k, v in file_data.items()})
+
+    def save(self):
+        data = {k: v[1] for k, v in self.data.read.items() if v[0]}
+        pyon.store_file(self.persist_file, data)
+
+    async def _do(self):
+        try:
+            while True:
+                await asyncio.sleep(self.autosave_period)
+                self.save()
+        finally:
+            self.save()
+
+    def get(self, key):
+        return self.data.read[key][1]
+
+    def update(self, mod):
+        process_mod(self.data, mod)
+
+    # convenience functions (update() can be used instead)
+    def set(self, key, value, persist=False):
+        self.data[key] = (persist, value)
+
+    def delete(self, key):
+        del self.data[key]
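Note: DatasetDB stores each key as a (persist, value) pair and save() keeps only the persistent entries. A minimal sketch with plain dicts standing in for the Notifier and the pyon file:

    data = {}

    def set_entry(key, value, persist=False):
        data[key] = (persist, value)

    def save():
        return {k: v[1] for k, v in data.items() if v[0]}

    set_entry("calibration", 1.23, persist=True)
    set_entry("scratch", 42)
    print(save())   # {'calibration': 1.23}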
@@ -12,17 +12,19 @@ from artiq.tools import exc_to_warning
 logger = logging.getLogger(__name__)


-@asyncio.coroutine
-def _scan_experiments(wd, log):
+async def _scan_experiments(wd, get_device_db, log):
     r = dict()
     for f in os.listdir(wd):
         if f.endswith(".py"):
             try:
-                worker = Worker({"log": lambda message: log("scan", message)})
+                worker = Worker({
+                    "get_device_db": get_device_db,
+                    "log": lambda message: log("scan", message)
+                })
                 try:
-                    description = yield from worker.examine(os.path.join(wd, f))
+                    description = await worker.examine(os.path.join(wd, f))
                 finally:
-                    yield from worker.close()
+                    await worker.close()
                 for class_name, class_desc in description.items():
                     name = class_desc["name"]
                     arguments = class_desc["arguments"]
@@ -54,8 +56,9 @@ def _sync_explist(target, source):


 class Repository:
-    def __init__(self, backend, log_fn):
+    def __init__(self, backend, get_device_db_fn, log_fn):
         self.backend = backend
+        self.get_device_db_fn = get_device_db_fn
         self.log_fn = log_fn

         self.cur_rev = self.backend.get_head_rev()
@@ -68,8 +71,7 @@ class Repository:
         # The object cannot be used anymore after calling this method.
         self.backend.release_rev(self.cur_rev)

-    @asyncio.coroutine
-    def scan(self, new_cur_rev=None):
+    async def scan(self, new_cur_rev=None):
         if self._scanning:
             return
         self._scanning = True
@@ -79,14 +81,15 @@ class Repository:
             wd, _ = self.backend.request_rev(new_cur_rev)
             self.backend.release_rev(self.cur_rev)
             self.cur_rev = new_cur_rev
-            new_explist = yield from _scan_experiments(wd, self.log_fn)
+            new_explist = await _scan_experiments(wd, self.get_device_db_fn,
+                                                  self.log_fn)

             _sync_explist(self.explist, new_explist)
         finally:
             self._scanning = False

     def scan_async(self, new_cur_rev=None):
-        asyncio.async(exc_to_warning(self.scan(new_cur_rev)))
+        asyncio.ensure_future(exc_to_warning(self.scan(new_cur_rev)))


 class FilesystemBackend:
@@ -24,13 +24,12 @@ class RunStatus(Enum):
 
 
 def _mk_worker_method(name):
-    @asyncio.coroutine
-    def worker_method(self, *args, **kwargs):
+    async def worker_method(self, *args, **kwargs):
         if self.worker.closed.is_set():
             return True
         m = getattr(self.worker, name)
         try:
-            return (yield from m(*args, **kwargs))
+            return await m(*args, **kwargs)
         except Exception as e:
             if isinstance(e, asyncio.CancelledError):
                 raise
@@ -58,6 +57,7 @@ class Run:
         self.flush = flush
 
         self.worker = Worker(pool.worker_handlers)
+        self.termination_requested = False
 
         self._status = RunStatus.pending
 
@@ -97,19 +97,17 @@ class Run:
             runnable = 1
         return (runnable, self.priority, due_date_k, -self.rid)
 
-    @asyncio.coroutine
-    def close(self):
+    async def close(self):
         # called through pool
-        yield from self.worker.close()
+        await self.worker.close()
         del self._notifier[self.rid]
 
     _build = _mk_worker_method("build")
 
-    @asyncio.coroutine
-    def build(self):
-        yield from self._build(self.rid, self.pipeline_name,
-                               self.wd, self.expid,
-                               self.priority)
+    async def build(self):
+        await self._build(self.rid, self.pipeline_name,
+                          self.wd, self.expid,
+                          self.priority)
 
     prepare = _mk_worker_method("prepare")
     run = _mk_worker_method("run")
@@ -154,13 +152,12 @@ class RunPool:
         self.state_changed.notify()
         return rid
 
-    @asyncio.coroutine
-    def delete(self, rid):
+    async def delete(self, rid):
         # called through deleter
         if rid not in self.runs:
             return
         run = self.runs[rid]
-        yield from run.close()
+        await run.close()
         if "repo_rev" in run.expid:
             self.repo_backend.release_rev(run.expid["repo_rev"])
         del self.runs[rid]
@@ -203,14 +200,13 @@ class PrepareStage(TaskObject):
         else:
             return candidate.due_date - now
 
-    @asyncio.coroutine
-    def _do(self):
+    async def _do(self):
         while True:
             run = self._get_run()
             if run is None:
-                yield from self.pool.state_changed.wait()
+                await self.pool.state_changed.wait()
             elif isinstance(run, float):
-                yield from asyncio_wait_or_cancel([self.pool.state_changed.wait()],
+                await asyncio_wait_or_cancel([self.pool.state_changed.wait()],
                                              timeout=run)
             else:
                 if run.flush:
@@ -221,7 +217,7 @@ class PrepareStage(TaskObject):
                            for r in self.pool.runs.values()):
                         ev = [self.pool.state_changed.wait(),
                               run.worker.closed.wait()]
-                        yield from asyncio_wait_or_cancel(
+                        await asyncio_wait_or_cancel(
                             ev, return_when=asyncio.FIRST_COMPLETED)
                         if run.worker.closed.is_set():
                             break
@@ -229,8 +225,8 @@ class PrepareStage(TaskObject):
                         continue
                 run.status = RunStatus.preparing
                 try:
-                    yield from run.build()
-                    yield from run.prepare()
+                    await run.build()
+                    await run.prepare()
                 except:
                     logger.warning("got worker exception in prepare stage, "
                                    "deleting RID %d",
@@ -255,8 +251,7 @@ class RunStage(TaskObject):
                 r = None
         return r
 
-    @asyncio.coroutine
-    def _do(self):
+    async def _do(self):
         stack = []
 
         while True:
@@ -265,7 +260,7 @@ class RunStage(TaskObject):
                     next_irun is not None and
                     next_irun.priority_key() > stack[-1].priority_key()):
                 while next_irun is None:
-                    yield from self.pool.state_changed.wait()
+                    await self.pool.state_changed.wait()
                     next_irun = self._get_run()
                 stack.append(next_irun)
 
@@ -273,10 +268,15 @@ class RunStage(TaskObject):
             try:
                 if run.status == RunStatus.paused:
                     run.status = RunStatus.running
-                    completed = yield from run.resume()
+                    # clear "termination requested" flag now
+                    # so that if it is set again during the resume, this
+                    # results in another exception.
+                    request_termination = run.termination_requested
+                    run.termination_requested = False
+                    completed = await run.resume(request_termination)
                 else:
                     run.status = RunStatus.running
-                    completed = yield from run.run()
+                    completed = await run.run()
             except:
                 logger.warning("got worker exception in run stage, "
                                "deleting RID %d",
@@ -305,17 +305,16 @@ class AnalyzeStage(TaskObject):
                 r = None
         return r
 
-    @asyncio.coroutine
-    def _do(self):
+    async def _do(self):
         while True:
             run = self._get_run()
             while run is None:
-                yield from self.pool.state_changed.wait()
+                await self.pool.state_changed.wait()
                 run = self._get_run()
             run.status = RunStatus.analyzing
             try:
-                yield from run.analyze()
-                yield from run.write_results()
+                await run.analyze()
+                await run.write_results()
             except:
                 logger.warning("got worker exception in analyze stage, "
                                "deleting RID %d",
@@ -337,18 +336,17 @@ class Pipeline:
         self._run.start()
         self._analyze.start()
 
-    @asyncio.coroutine
-    def stop(self):
+    async def stop(self):
         # NB: restart of a stopped pipeline is not supported
-        yield from self._analyze.stop()
-        yield from self._run.stop()
-        yield from self._prepare.stop()
+        await self._analyze.stop()
+        await self._run.stop()
+        await self._prepare.stop()
 
 
 class Deleter(TaskObject):
     def __init__(self, pipelines):
         self._pipelines = pipelines
-        self._queue = asyncio.JoinableQueue()
+        self._queue = asyncio.Queue()
 
     def delete(self, rid):
         logger.debug("delete request for RID %d", rid)
@@ -358,36 +356,32 @@ class Deleter(TaskObject):
                 break
         self._queue.put_nowait(rid)
 
-    @asyncio.coroutine
-    def join(self):
-        yield from self._queue.join()
+    async def join(self):
+        await self._queue.join()
 
-    @asyncio.coroutine
-    def _delete(self, rid):
+    async def _delete(self, rid):
         for pipeline in self._pipelines.values():
             if rid in pipeline.pool.runs:
                 logger.debug("deleting RID %d...", rid)
-                yield from pipeline.pool.delete(rid)
+                await pipeline.pool.delete(rid)
                 logger.debug("deletion of RID %d completed", rid)
                 break
 
-    @asyncio.coroutine
-    def _gc_pipelines(self):
+    async def _gc_pipelines(self):
         pipeline_names = list(self._pipelines.keys())
         for name in pipeline_names:
             if not self._pipelines[name].pool.runs:
                 logger.debug("garbage-collecting pipeline '%s'...", name)
-                yield from self._pipelines[name].stop()
+                await self._pipelines[name].stop()
                 del self._pipelines[name]
                 logger.debug("garbage-collection of pipeline '%s' completed",
                              name)
 
-    @asyncio.coroutine
-    def _do(self):
+    async def _do(self):
         while True:
-            rid = yield from self._queue.get()
-            yield from self._delete(rid)
-            yield from self._gc_pipelines()
+            rid = await self._queue.get()
+            await self._delete(rid)
+            await self._gc_pipelines()
             self._queue.task_done()
 
 
@@ -406,15 +400,14 @@ class Scheduler:
     def start(self):
         self._deleter.start()
 
-    @asyncio.coroutine
-    def stop(self):
+    async def stop(self):
         # NB: restart of a stopped scheduler is not supported
         self._terminated = True  # prevent further runs from being created
         for pipeline in self._pipelines.values():
             for rid in pipeline.pool.runs.keys():
                 self._deleter.delete(rid)
-        yield from self._deleter.join()
-        yield from self._deleter.stop()
+        await self._deleter.join()
+        await self._deleter.stop()
         if self._pipelines:
             logger.warning("some pipelines were not garbage-collected")
 
@@ -435,3 +428,13 @@ class Scheduler:
 
     def delete(self, rid):
         self._deleter.delete(rid)
+
+    def request_termination(self, rid):
+        for pipeline in self._pipelines.values():
+            if rid in pipeline.pool.runs:
+                run = pipeline.pool.runs[rid]
+                if run.status == RunStatus.running or run.status == RunStatus.paused:
+                    run.termination_requested = True
+                else:
+                    self.delete(rid)
+                break
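The `RunStage._do` hunk above introduces a read-and-clear flag for cooperative termination: the flag is captured and reset before the worker is resumed, so a second termination request arriving during the resume is not silently swallowed. A standalone sketch of the same pattern (the `Run` class here is illustrative, not the ARTIQ one):

import asyncio

class Run:
    """Minimal sketch of the read-and-clear flag pattern used above."""
    def __init__(self):
        self.termination_requested = False

    async def resume(self, request_termination):
        await asyncio.sleep(0)          # stand-in for the worker round-trip
        return request_termination      # pretend the run completed iff asked to stop

async def step(run):
    # Capture and clear the flag *before* resuming, so a request that
    # arrives during the resume is seen as a fresh request next time.
    request_termination = run.termination_requested
    run.termination_requested = False
    return await run.resume(request_termination)

run = Run()
run.termination_requested = True
print(asyncio.get_event_loop().run_until_complete(step(run)))  # -> True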
@@ -7,8 +7,7 @@ import time
 from functools import partial
 
 from artiq.protocols import pyon
-from artiq.tools import (asyncio_process_wait_timeout, asyncio_process_wait,
-                         asyncio_wait_or_cancel)
+from artiq.tools import asyncio_wait_or_cancel
 
 
 logger = logging.getLogger(__name__)
@@ -22,6 +21,10 @@ class WorkerWatchdogTimeout(Exception):
     pass
 
 
+class WorkerException(Exception):
+    pass
+
+
 class WorkerError(Exception):
     pass
 
@@ -57,27 +60,25 @@ class Worker:
         else:
             return None
 
-    @asyncio.coroutine
-    def _create_process(self):
-        yield from self.io_lock.acquire()
+    async def _create_process(self):
+        await self.io_lock.acquire()
         try:
             if self.closed.is_set():
                 raise WorkerError("Attempting to create process after close")
-            self.process = yield from asyncio.create_subprocess_exec(
+            self.process = await asyncio.create_subprocess_exec(
                 sys.executable, "-m", "artiq.master.worker_impl",
                 stdout=subprocess.PIPE, stdin=subprocess.PIPE)
         finally:
             self.io_lock.release()
 
-    @asyncio.coroutine
-    def close(self, term_timeout=1.0):
+    async def close(self, term_timeout=1.0):
         """Interrupts any I/O with the worker process and terminates the
         worker process.
 
         This method should always be called by the user to clean up, even if
         build() or examine() raises an exception."""
         self.closed.set()
-        yield from self.io_lock.acquire()
+        await self.io_lock.acquire()
         try:
             if self.process is None:
                 # Note the %s - self.rid can be None
@@ -92,27 +93,25 @@ class Worker:
                 return
             obj = {"action": "terminate"}
             try:
-                yield from self._send(obj, cancellable=False)
+                await self._send(obj, cancellable=False)
             except:
                 logger.warning("failed to send terminate command to worker"
                                " (RID %s), killing", self.rid, exc_info=True)
                 self.process.kill()
-                yield from asyncio_process_wait(self.process)
+                await self.process.wait()
                 return
             try:
-                yield from asyncio_process_wait_timeout(self.process,
-                                                        term_timeout)
+                await asyncio.wait_for(self.process.wait(), term_timeout)
             except asyncio.TimeoutError:
                 logger.warning("worker did not exit (RID %s), killing", self.rid)
                 self.process.kill()
-                yield from asyncio_process_wait(self.process)
+                await self.process.wait()
             else:
                 logger.debug("worker exited gracefully (RID %s)", self.rid)
         finally:
             self.io_lock.release()
 
-    @asyncio.coroutine
-    def _send(self, obj, cancellable=True):
+    async def _send(self, obj, cancellable=True):
         assert self.io_lock.locked()
         line = pyon.encode(obj)
         self.process.stdin.write(line.encode())
@@ -120,7 +119,7 @@ class Worker:
         ifs = [self.process.stdin.drain()]
         if cancellable:
             ifs.append(self.closed.wait())
-        fs = yield from asyncio_wait_or_cancel(
+        fs = await asyncio_wait_or_cancel(
             ifs, timeout=self.send_timeout,
             return_when=asyncio.FIRST_COMPLETED)
         if all(f.cancelled() for f in fs):
@@ -131,10 +130,9 @@ class Worker:
         if cancellable and self.closed.is_set():
             raise WorkerError("Data transmission to worker cancelled")
 
-    @asyncio.coroutine
-    def _recv(self, timeout):
+    async def _recv(self, timeout):
         assert self.io_lock.locked()
-        fs = yield from asyncio_wait_or_cancel(
+        fs = await asyncio_wait_or_cancel(
             [self.process.stdout.readline(), self.closed.wait()],
             timeout=timeout, return_when=asyncio.FIRST_COMPLETED)
         if all(f.cancelled() for f in fs):
@@ -150,13 +148,12 @@ class Worker:
             raise WorkerError("Worker sent invalid PYON data")
         return obj
 
-    @asyncio.coroutine
-    def _handle_worker_requests(self):
+    async def _handle_worker_requests(self):
         while True:
             try:
-                yield from self.io_lock.acquire()
+                await self.io_lock.acquire()
                 try:
-                    obj = yield from self._recv(self.watchdog_time())
+                    obj = await self._recv(self.watchdog_time())
                 finally:
                     self.io_lock.release()
             except WorkerTimeout:
@@ -166,6 +163,8 @@ class Worker:
                 return True
             elif action == "pause":
                 return False
+            elif action == "exception":
+                raise WorkerException
             del obj["action"]
             if action == "create_watchdog":
                 func = self.create_watchdog
@@ -183,24 +182,23 @@ class Worker:
             except:
                 reply = {"status": "failed",
                          "message": traceback.format_exc()}
-            yield from self.io_lock.acquire()
+            await self.io_lock.acquire()
             try:
-                yield from self._send(reply)
+                await self._send(reply)
             finally:
                 self.io_lock.release()
 
-    @asyncio.coroutine
-    def _worker_action(self, obj, timeout=None):
+    async def _worker_action(self, obj, timeout=None):
         if timeout is not None:
             self.watchdogs[-1] = time.monotonic() + timeout
         try:
-            yield from self.io_lock.acquire()
+            await self.io_lock.acquire()
             try:
-                yield from self._send(obj)
+                await self._send(obj)
             finally:
                 self.io_lock.release()
             try:
-                completed = yield from self._handle_worker_requests()
+                completed = await self._handle_worker_requests()
             except WorkerTimeout:
                 raise WorkerWatchdogTimeout
         finally:
@@ -208,11 +206,10 @@ class Worker:
                 del self.watchdogs[-1]
         return completed
 
-    @asyncio.coroutine
-    def build(self, rid, pipeline_name, wd, expid, priority, timeout=15.0):
+    async def build(self, rid, pipeline_name, wd, expid, priority, timeout=15.0):
         self.rid = rid
-        yield from self._create_process()
-        yield from self._worker_action(
+        await self._create_process()
+        await self._worker_action(
             {"action": "build",
              "rid": rid,
              "pipeline_name": pipeline_name,
@@ -221,45 +218,39 @@ class Worker:
              "priority": priority},
             timeout)
 
-    @asyncio.coroutine
-    def prepare(self):
-        yield from self._worker_action({"action": "prepare"})
+    async def prepare(self):
+        await self._worker_action({"action": "prepare"})
 
-    @asyncio.coroutine
-    def run(self):
-        completed = yield from self._worker_action({"action": "run"})
+    async def run(self):
+        completed = await self._worker_action({"action": "run"})
         if not completed:
             self.yield_time = time.monotonic()
         return completed
 
-    @asyncio.coroutine
-    def resume(self):
+    async def resume(self, request_termination):
         stop_duration = time.monotonic() - self.yield_time
         for wid, expiry in self.watchdogs:
             self.watchdogs[wid] += stop_duration
-        completed = yield from self._worker_action({"status": "ok",
-                                                    "data": None})
+        completed = await self._worker_action({"status": "ok",
+                                               "data": request_termination})
        if not completed:
             self.yield_time = time.monotonic()
         return completed
 
-    @asyncio.coroutine
-    def analyze(self):
-        yield from self._worker_action({"action": "analyze"})
+    async def analyze(self):
+        await self._worker_action({"action": "analyze"})
 
-    @asyncio.coroutine
-    def write_results(self, timeout=15.0):
-        yield from self._worker_action({"action": "write_results"},
-                                       timeout)
+    async def write_results(self, timeout=15.0):
+        await self._worker_action({"action": "write_results"},
+                                  timeout)
 
-    @asyncio.coroutine
-    def examine(self, file, timeout=20.0):
-        yield from self._create_process()
+    async def examine(self, file, timeout=20.0):
+        await self._create_process()
         r = dict()
         def register(class_name, name, arguments):
             r[class_name] = {"name": name, "arguments": arguments}
         self.register_experiment = register
-        yield from self._worker_action({"action": "examine",
-                                        "file": file}, timeout)
+        await self._worker_action({"action": "examine", "file": file},
+                                  timeout)
         del self.register_experiment
         return r
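The `Worker.close` hunk replaces the project's custom `asyncio_process_wait`/`asyncio_process_wait_timeout` helpers with standard-library equivalents: `process.wait()` is itself a coroutine on asyncio subprocesses, and `asyncio.wait_for()` supplies the timeout. A sketch of the resulting terminate-then-kill sequence (the `sleep` child is a POSIX-only stand-in; the real code first sends a "terminate" command over the pipe):

import asyncio

async def close_process(process, term_timeout=1.0):
    # Give the child term_timeout seconds to exit on its own,
    # then kill it and reap the zombie.
    try:
        await asyncio.wait_for(process.wait(), term_timeout)
    except asyncio.TimeoutError:
        process.kill()
        await process.wait()

async def demo():
    process = await asyncio.create_subprocess_exec("sleep", "10")
    await close_process(process)
    print("exited with", process.returncode)

asyncio.get_event_loop().run_until_complete(demo())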
@@ -15,6 +15,64 @@ from artiq.protocols.pc_rpc import Client, BestEffortClient
 logger = logging.getLogger(__name__)
 
 
+def _create_device(desc, device_mgr):
+    ty = desc["type"]
+    if ty == "local":
+        module = importlib.import_module(desc["module"])
+        device_class = getattr(module, desc["class"])
+        return device_class(device_mgr, **desc["arguments"])
+    elif ty == "controller":
+        if desc["best_effort"]:
+            cl = BestEffortClient
+        else:
+            cl = Client
+        return cl(desc["host"], desc["port"], desc["target_name"])
+    else:
+        raise ValueError("Unsupported type in device DB: " + ty)
+
+
+class DeviceManager:
+    """Handles creation and destruction of local device drivers and controller
+    RPC clients."""
+    def __init__(self, ddb, virtual_devices=dict()):
+        self.ddb = ddb
+        self.virtual_devices = virtual_devices
+        self.active_devices = OrderedDict()
+
+    def get_device_db(self):
+        """Returns the full contents of the device database."""
+        return self.ddb.get_device_db()
+
+    def get(self, name):
+        """Get the device driver or controller client corresponding to a
+        device database entry."""
+        if name in self.virtual_devices:
+            return self.virtual_devices[name]
+        if name in self.active_devices:
+            return self.active_devices[name]
+        else:
+            desc = self.ddb.get(name)
+            while isinstance(desc, str):
+                # alias
+                desc = self.ddb.get(desc)
+            dev = _create_device(desc, self)
+            self.active_devices[name] = dev
+            return dev
+
+    def close_devices(self):
+        """Closes all active devices, in the opposite order as they were
+        requested."""
+        for dev in reversed(list(self.active_devices.values())):
+            try:
+                if isinstance(dev, (Client, BestEffortClient)):
+                    dev.close_rpc()
+                elif hasattr(dev, "close"):
+                    dev.close()
+            except Exception as e:
+                logger.warning("Exception %r when closing device %r", e, dev)
+        self.active_devices.clear()
+
+
 def get_hdf5_output(start_time, rid, name):
     dirname = os.path.join("results",
                            time.strftime("%Y-%m-%d", start_time),
@@ -87,80 +145,30 @@ def result_dict_to_hdf5(f, rd):
             dataset[()] = data
 
 
-class ResultDB:
-    def __init__(self):
-        self.rt = Notifier(dict())
-        self.nrt = dict()
-        self.store = set()
+class DatasetManager:
+    def __init__(self, ddb):
+        self.broadcast = Notifier(dict())
+        self.local = dict()
+
+        self.ddb = ddb
+        self.broadcast.publish = ddb.update
+
+    def set(self, key, value, broadcast=False, persist=False, save=True):
+        if persist:
+            broadcast = True
+        r = None
+        if broadcast:
+            self.broadcast[key] = (persist, value)
+            r = self.broadcast[key][1]
+        if save:
+            self.local[key] = value
+        return r
 
     def get(self, key):
         try:
-            return self.nrt[key]
+            return self.local[key]
         except KeyError:
-            return self.rt[key].read
-
-    def set_store(self, key, store):
-        if store:
-            self.store.add(key)
-        else:
-            self.store.discard(key)
+            return self.ddb.get(key)
 
     def write_hdf5(self, f):
-        result_dict_to_hdf5(
-            f, {k: v for k, v in self.rt.read.items() if k in self.store})
-        result_dict_to_hdf5(
-            f, {k: v for k, v in self.nrt.items() if k in self.store})
-
-
-def _create_device(desc, dmgr):
-    ty = desc["type"]
-    if ty == "local":
-        module = importlib.import_module(desc["module"])
-        device_class = getattr(module, desc["class"])
-        return device_class(dmgr, **desc["arguments"])
-    elif ty == "controller":
-        if desc["best_effort"]:
-            cl = BestEffortClient
-        else:
-            cl = Client
-        return cl(desc["host"], desc["port"], desc["target_name"])
-    else:
-        raise ValueError("Unsupported type in device DB: " + ty)
-
-
-class DeviceManager:
-    """Handles creation and destruction of local device drivers and controller
-    RPC clients."""
-    def __init__(self, ddb, virtual_devices=dict()):
-        self.ddb = ddb
-        self.virtual_devices = virtual_devices
-        self.active_devices = OrderedDict()
-
-    def get(self, name):
-        """Get the device driver or controller client corresponding to a
-        device database entry."""
-        if name in self.virtual_devices:
-            return self.virtual_devices[name]
-        if name in self.active_devices:
-            return self.active_devices[name]
-        else:
-            desc = self.ddb.get(name)
-            while isinstance(desc, str):
-                # alias
-                desc = self.ddb.get(desc)
-            dev = _create_device(desc, self)
-            self.active_devices[name] = dev
-            return dev
-
-    def close_devices(self):
-        """Closes all active devices, in the opposite order as they were
-        requested."""
-        for dev in reversed(list(self.active_devices.values())):
-            try:
-                if isinstance(dev, (Client, BestEffortClient)):
-                    dev.close_rpc()
-                elif hasattr(dev, "close"):
-                    dev.close()
-            except Exception as e:
-                logger.warning("Exception %r when closing device %r", e, dev)
-        self.active_devices.clear()
+        result_dict_to_hdf5(f, self.local)
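The new `DatasetManager` splits every dataset write across up to two destinations: a broadcast `Notifier` (whose `publish` is wired to the parent dataset database and carries the persist flag) and a local dict that later lands in the results HDF5 file; `persist=True` implies broadcasting. A plain-dict sketch of those `set`/`get` semantics (the `Notifier` is replaced by a dict here, and `ddb` is any object with a `get()` method):

class DatasetManagerSketch:
    """Illustrative reimplementation of the set()/get() semantics above."""
    def __init__(self, ddb):
        self.ddb = ddb            # parent dataset database (assumed API: get())
        self.broadcast = {}       # would be a Notifier published to clients
        self.local = {}           # written to the results HDF5 file

    def set(self, key, value, broadcast=False, persist=False, save=True):
        if persist:
            broadcast = True      # persisted datasets are always broadcast
        if broadcast:
            self.broadcast[key] = (persist, value)
        if save:
            self.local[key] = value

    def get(self, key):
        # local results shadow the parent database
        try:
            return self.local[key]
        except KeyError:
            return self.ddb.get(key)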
@@ -1,12 +1,13 @@
 import sys
 import time
 import os
+import traceback
 
 from artiq.protocols import pyon
 from artiq.tools import file_import
-from artiq.master.worker_db import DeviceManager, ResultDB, get_hdf5_output
+from artiq.master.worker_db import DeviceManager, DatasetManager, get_hdf5_output
 from artiq.language.environment import is_experiment
-from artiq.language.core import set_watchdog_factory
+from artiq.language.core import set_watchdog_factory, TerminationRequested
 
 
 def get_object():
@@ -62,16 +63,14 @@ class LogForwarder:
     pass
 
 
-class ParentDDB:
-    get = make_parent_action("get_device", "name", KeyError)
+class ParentDeviceDB:
+    get_device_db = make_parent_action("get_device_db", "")
+    get = make_parent_action("get_device", "key", KeyError)
 
 
-class ParentPDB:
-    get = make_parent_action("get_parameter", "name", KeyError)
-    set = make_parent_action("set_parameter", "name value")
-
-
-update_rt_results = make_parent_action("update_rt_results", "mod")
+class ParentDatasetDB:
+    get = make_parent_action("get_dataset", "key", KeyError)
+    update = make_parent_action("update_dataset", "mod")
 
 
 class Watchdog:
@@ -92,7 +91,11 @@ set_watchdog_factory(Watchdog)
 
 
 class Scheduler:
-    pause = staticmethod(make_parent_action("pause", ""))
+    pause_noexc = staticmethod(make_parent_action("pause", ""))
+
+    def pause(self):
+        if self.pause_noexc():
+            raise TerminationRequested
 
     submit = staticmethod(make_parent_action("scheduler_submit",
         "pipeline_name expid priority due_date flush"))
@@ -121,20 +124,22 @@ register_experiment = make_parent_action("register_experiment",
                                          "class_name name arguments")
 
 
-class DummyDMGR:
+class ExamineDeviceMgr:
+    get_device_db = make_parent_action("get_device_db", "")
+
     def get(self, name):
         return None
 
 
-class DummyPDB:
-    def get(self, name):
+class DummyDatasetMgr:
+    def set(self, key, value, broadcast=False, persist=False, save=True):
         return None
 
-    def set(self, name, value):
+    def get(self, key):
         pass
 
 
-def examine(dmgr, pdb, rdb, file):
+def examine(device_mgr, dataset_mgr, file):
     module = file_import(file)
     for class_name, exp_class in module.__dict__.items():
         if class_name[0] == "_":
@@ -146,7 +151,7 @@ def examine(dmgr, pdb, rdb, file):
             name = exp_class.__doc__.splitlines()[0].strip()
             if name[-1] == ".":
                 name = name[:-1]
-        exp_inst = exp_class(dmgr, pdb, rdb, default_arg_none=True)
+        exp_inst = exp_class(device_mgr, dataset_mgr, default_arg_none=True)
         arguments = [(k, (proc.describe(), group))
                      for k, (proc, group) in exp_inst.requested_args.items()]
         register_experiment(class_name, name, arguments)
@@ -161,10 +166,9 @@ def main():
     exp = None
     exp_inst = None
 
-    dmgr = DeviceManager(ParentDDB,
+    device_mgr = DeviceManager(ParentDeviceDB,
                                virtual_devices={"scheduler": Scheduler()})
-    rdb = ResultDB()
-    rdb.rt.publish = update_rt_results
+    dataset_mgr = DatasetManager(ParentDatasetDB)
 
     try:
         while True:
@@ -180,9 +184,9 @@ def main():
             else:
                 expf = expid["file"]
                 exp = get_exp(expf, expid["class_name"])
-                dmgr.virtual_devices["scheduler"].set_run_info(
+                device_mgr.virtual_devices["scheduler"].set_run_info(
                     obj["pipeline_name"], expid, obj["priority"])
-                exp_inst = exp(dmgr, ParentPDB, rdb,
+                exp_inst = exp(device_mgr, dataset_mgr,
                                **expid["arguments"])
                 put_object({"action": "completed"})
             elif action == "prepare":
@@ -197,7 +201,7 @@ def main():
             elif action == "write_results":
                 f = get_hdf5_output(start_time, rid, exp.__name__)
                 try:
-                    rdb.write_hdf5(f)
+                    dataset_mgr.write_hdf5(f)
                     if "repo_rev" in expid:
                         rr = expid["repo_rev"]
                         dtype = "S{}".format(len(rr))
@@ -207,12 +211,15 @@ def main():
                     f.close()
                 put_object({"action": "completed"})
             elif action == "examine":
-                examine(DummyDMGR(), DummyPDB(), ResultDB(), obj["file"])
+                examine(ExamineDeviceMgr(), DummyDatasetMgr(), obj["file"])
                 put_object({"action": "completed"})
             elif action == "terminate":
                 break
+    except:
+        traceback.print_exc()
+        put_object({"action": "exception"})
     finally:
-        dmgr.close_devices()
+        device_mgr.close_devices()
 
 
 if __name__ == "__main__":
     main()
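Together with the `except:` handler added to `main()`, the reworked `Scheduler` virtual device closes the termination loop: the parent's `pause` action now returns a boolean, and a `True` reply (fed from `request_termination` through `Worker.resume`) is converted into a `TerminationRequested` exception inside the experiment. A self-contained sketch of that conversion, with the parent RPC faked by a lambda:

class TerminationRequested(Exception):
    pass

class SchedulerSketch:
    """Sketch of the pause()/pause_noexc() split above."""
    def __init__(self, pause_noexc):
        self.pause_noexc = pause_noexc   # RPC to the master; returns bool

    def pause(self):
        # A True reply means the master wants this run to terminate.
        if self.pause_noexc():
            raise TerminationRequested

sched = SchedulerSketch(lambda: True)
try:
    sched.pause()
except TerminationRequested:
    print("run asked to terminate")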
@@ -12,8 +12,7 @@ class AsyncioServer:
     def __init__(self):
         self._client_tasks = set()
 
-    @asyncio.coroutine
-    def start(self, host, port):
+    async def start(self, host, port):
         """Starts the server.
 
         The user must call ``stop`` to free resources properly after this
@@ -26,11 +25,10 @@ class AsyncioServer:
         :param port: TCP port to bind to.
 
         """
-        self.server = yield from asyncio.start_server(self._handle_connection,
+        self.server = await asyncio.start_server(self._handle_connection,
                                                  host, port)
 
-    @asyncio.coroutine
-    def stop(self):
+    async def stop(self):
         """Stops the server.
 
         """
@@ -39,11 +37,11 @@ class AsyncioServer:
             task.cancel()
         for task in wait_for:
             try:
-                yield from asyncio.wait_for(task, None)
+                await asyncio.wait_for(task, None)
             except asyncio.CancelledError:
                 pass
         self.server.close()
-        yield from self.server.wait_closed()
+        await self.server.wait_closed()
         del self.server
 
     def _client_done(self, task):
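The `stop()` hunk keeps the shutdown order of the original: cancel every per-client task, wait for each to unwind, then close the listening socket and wait for it to be released. The same sequence as a standalone helper (assuming `server` comes from `asyncio.start_server()` and `client_tasks` is a set of Tasks):

import asyncio

async def stop_server(server, client_tasks):
    # Cancel per-client tasks first so handlers can run their cleanup...
    for task in list(client_tasks):
        task.cancel()
    for task in list(client_tasks):
        try:
            await asyncio.wait_for(task, None)
        except asyncio.CancelledError:
            pass
    # ...then release the listening socket.
    server.close()
    await server.wait_closed()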
@@ -1,31 +0,0 @@
-from time import time
-
-from artiq.protocols import pyon
-from artiq.protocols.sync_struct import Notifier
-
-
-class FlatFileDB:
-    def __init__(self, filename):
-        self.filename = filename
-        self.data = Notifier(pyon.load_file(self.filename))
-        self.hooks = []
-
-    def save(self):
-        pyon.store_file(self.filename, self.data.read)
-
-    def get(self, name):
-        return self.data.read[name]
-
-    def set(self, name, value):
-        self.data[name] = value
-        self.save()
-        timestamp = time()
-        for hook in self.hooks:
-            hook.set(timestamp, name, value)
-
-    def delete(self, name):
-        del self.data[name]
-        self.save()
-        timestamp = time()
-        for hook in self.hooks:
-            hook.delete(timestamp, name)
@@ -159,16 +159,15 @@ class AsyncioClient:
         self.__target_names = None
         self.__description = None
 
-    @asyncio.coroutine
-    def connect_rpc(self, host, port, target_name):
+    async def connect_rpc(self, host, port, target_name):
         """Connects to the server. This cannot be done in __init__ because
         this method is a coroutine. See ``Client`` for a description of the
         parameters."""
         self.__reader, self.__writer = \
-            yield from asyncio.open_connection(host, port)
+            await asyncio.open_connection(host, port)
         try:
             self.__writer.write(_init_string)
-            server_identification = yield from self.__recv()
+            server_identification = await self.__recv()
             self.__target_names = server_identification["targets"]
             self.__description = server_identification["description"]
             if target_name is not None:
@@ -205,20 +204,18 @@ class AsyncioClient:
         line = pyon.encode(obj) + "\n"
         self.__writer.write(line.encode())
 
-    @asyncio.coroutine
-    def __recv(self):
-        line = yield from self.__reader.readline()
+    async def __recv(self):
+        line = await self.__reader.readline()
         return pyon.decode(line.decode())
 
-    @asyncio.coroutine
-    def __do_rpc(self, name, args, kwargs):
-        yield from self.__lock.acquire()
+    async def __do_rpc(self, name, args, kwargs):
+        await self.__lock.acquire()
         try:
             obj = {"action": "call", "name": name,
                    "args": args, "kwargs": kwargs}
             self.__send(obj)
 
-            obj = yield from self.__recv()
+            obj = await self.__recv()
             if obj["status"] == "ok":
                 return obj["ret"]
             elif obj["status"] == "failed":
@@ -229,9 +226,8 @@ class AsyncioClient:
             self.__lock.release()
 
     def __getattr__(self, name):
-        @asyncio.coroutine
-        def proxy(*args, **kwargs):
-            res = yield from self.__do_rpc(name, args, kwargs)
+        async def proxy(*args, **kwargs):
+            res = await self.__do_rpc(name, args, kwargs)
             return res
         return proxy
 
@@ -413,10 +409,9 @@ class Server(_AsyncioServer):
         if builtin_terminate:
             self._terminate_request = asyncio.Event()
 
-    @asyncio.coroutine
-    def _handle_connection_cr(self, reader, writer):
+    async def _handle_connection_cr(self, reader, writer):
         try:
-            line = yield from reader.readline()
+            line = await reader.readline()
             if line != _init_string:
                 return
 
@@ -426,7 +421,7 @@ class Server(_AsyncioServer):
             }
             line = pyon.encode(obj) + "\n"
             writer.write(line.encode())
-            line = yield from reader.readline()
+            line = await reader.readline()
             if not line:
                 return
             target_name = line.decode()[:-1]
@@ -436,7 +431,7 @@ class Server(_AsyncioServer):
                 return
 
             while True:
-                line = yield from reader.readline()
+                line = await reader.readline()
                 if not line:
                     break
                 obj = pyon.decode(line.decode())
@@ -486,9 +481,8 @@ class Server(_AsyncioServer):
         finally:
             writer.close()
 
-    @asyncio.coroutine
-    def wait_terminate(self):
-        yield from self._terminate_request.wait()
+    async def wait_terminate(self):
+        await self._terminate_request.wait()
 
 
 def simple_server_loop(targets, host, port, description=None):
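The `__getattr__` hunk shows how the client manufactures coroutine proxies: any attribute access returns an `async def proxy` that forwards the call through `__do_rpc`. A runnable sketch of the mechanism with the network round-trip stubbed out:

import asyncio

class AsyncProxyClient:
    """Sketch of the __getattr__ proxy pattern above (not the ARTIQ client)."""
    async def _do_rpc(self, name, args, kwargs):
        await asyncio.sleep(0)             # stand-in for the network round-trip
        return (name, args, kwargs)

    def __getattr__(self, name):
        # Only undefined attributes reach __getattr__, so any "method" name
        # becomes a coroutine that ships the call to the server.
        async def proxy(*args, **kwargs):
            return await self._do_rpc(name, args, kwargs)
        return proxy

client = AsyncProxyClient()
loop = asyncio.get_event_loop()
print(loop.run_until_complete(client.echo(42, mode="fast")))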
@@ -187,3 +187,23 @@ def load_file(filename):
     """Parses the specified file and returns the decoded Python object."""
     with open(filename, "r") as f:
         return decode(f.read())
+
+
+class FlatFileDB:
+    def __init__(self, filename):
+        self.filename = filename
+        self.data = pyon.load_file(self.filename)
+
+    def save(self):
+        pyon.store_file(self.filename, self.data)
+
+    def get(self, key):
+        return self.data[key]
+
+    def set(self, key, value):
+        self.data[key] = value
+        self.save()
+
+    def delete(self, key):
+        del self.data[key]
+        self.save()
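Compared with the deleted module above, this reincarnation of `FlatFileDB` drops the `Notifier` wrapper and the timestamped hooks: it is now a plain dict that rewrites the whole PYON file on every `set()` or `delete()`. A hypothetical usage sketch, assuming the `artiq` package is importable and repeating the class for self-containment:

from artiq.protocols import pyon

class FlatFileDB:  # copy of the class added in the hunk above
    def __init__(self, filename):
        self.filename = filename
        self.data = pyon.load_file(self.filename)

    def save(self):
        pyon.store_file(self.filename, self.data)

    def get(self, key):
        return self.data[key]

    def set(self, key, value):
        self.data[key] = value
        self.save()

# Assumed starting point: a PYON file containing a dict.
pyon.store_file("settings.pyon", {"scan_range": [0.0, 1.0]})
db = FlatFileDB("settings.pyon")
db.set("scan_range", [0.0, 2.0])   # every set() rewrites the whole file
print(db.get("scan_range"))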
@@ -61,10 +61,9 @@ class Subscriber:
         self.target_builders = [target_builder]
         self.notify_cb = notify_cb
 
-    @asyncio.coroutine
-    def connect(self, host, port, before_receive_cb=None):
+    async def connect(self, host, port, before_receive_cb=None):
         self.reader, self.writer = \
-            yield from asyncio.open_connection(host, port)
+            await asyncio.open_connection(host, port)
         try:
             if before_receive_cb is not None:
                 before_receive_cb()
@@ -77,12 +76,11 @@ class Subscriber:
             del self.writer
             raise
 
-    @asyncio.coroutine
-    def close(self):
+    async def close(self):
         try:
             self.receive_task.cancel()
             try:
-                yield from asyncio.wait_for(self.receive_task, None)
+                await asyncio.wait_for(self.receive_task, None)
             except asyncio.CancelledError:
                 pass
         finally:
@@ -90,11 +88,10 @@ class Subscriber:
             del self.reader
             del self.writer
 
-    @asyncio.coroutine
-    def _receive_cr(self):
+    async def _receive_cr(self):
         targets = []
         while True:
-            line = yield from self.reader.readline()
+            line = await self.reader.readline()
             if not line:
                 return
             mod = pyon.decode(line.decode())
@@ -209,14 +206,13 @@ class Publisher(AsyncioServer):
         for notifier in notifiers.values():
             notifier.publish = partial(self.publish, notifier)
 
-    @asyncio.coroutine
-    def _handle_connection_cr(self, reader, writer):
+    async def _handle_connection_cr(self, reader, writer):
         try:
-            line = yield from reader.readline()
+            line = await reader.readline()
             if line != _init_string:
                 return
 
-            line = yield from reader.readline()
+            line = await reader.readline()
             if not line:
                 return
             notifier_name = line.decode()[:-1]
@@ -234,10 +230,10 @@ class Publisher(AsyncioServer):
             self._recipients[notifier_name].add(queue)
             try:
                 while True:
-                    line = yield from queue.get()
+                    line = await queue.get()
                     writer.write(line)
                     # raise exception on connection error
-                    yield from writer.drain()
+                    await writer.drain()
             finally:
                 self._recipients[notifier_name].remove(queue)
         except ConnectionResetError:
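The publisher keeps one queue per subscriber; `await writer.drain()` both applies backpressure to slow clients and surfaces connection errors as exceptions inside the loop, which is how dead subscribers get removed from `_recipients`. The per-client loop in isolation, as a sketch:

import asyncio

async def client_loop(queue, writer):
    # One instance of this loop runs per subscriber; its queue is fed by
    # publish(). drain() blocks when the socket buffer is full and raises
    # on a broken connection, ending the loop.
    while True:
        line = await queue.get()
        writer.write(line)
        await writer.drain()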
@@ -356,8 +356,7 @@ class Function:
         exception_id = self.mappers.exception.encode(exception_class)
         return ast.copy_location(
             ast.Call(func=ast.Name("EncodedException", ast.Load()),
-                     args=[value_to_ast(exception_id)],
-                     keywords=[], starargs=None, kwargs=None),
+                     args=[value_to_ast(exception_id)], keywords=[]),
             e)
 
     def code_visit_Raise(self, node):
@@ -514,8 +513,7 @@ def get_attr_writeback(attribute_namespace, rpc_mapper, loc_node):
         arg3 = ast.copy_location(
             ast.Name(attr_info.mangled_name, ast.Load()), loc_node)
         call = ast.copy_location(
-            ast.Call(func=func, args=[arg1, arg2, arg3],
-                     keywords=[], starargs=None, kwargs=None),
+            ast.Call(func=func, args=[arg1, arg2, arg3], keywords=[]),
             loc_node)
         expr = ast.copy_location(ast.Expr(call), loc_node)
         attr_writeback.append(expr)
@@ -71,8 +71,7 @@ def _interleave_timelines(timelines):
                 delay_stmt = ast.copy_location(
                     ast.Expr(ast.Call(
                         func=ast.Name("delay_mu", ast.Load()),
-                        args=[value_to_ast(dt)],
-                        keywords=[], starargs=[], kwargs=[])),
+                        args=[value_to_ast(dt)], keywords=[])),
                     ref_stmt)
                 r.append(delay_stmt)
             else:
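These `ast.Call` hunks track a Python 3.5 change: the `starargs` and `kwargs` fields were removed from `ast.Call`, so nodes built for `compile()` must pass only `func`, `args` and `keywords`. A small sketch of building and evaluating such a node under 3.5 (the `delay_mu` stub is just for demonstration):

import ast

# Build `delay_mu(42)` as an AST using the Python 3.5 ast.Call signature.
call = ast.Expression(
    ast.Call(func=ast.Name("delay_mu", ast.Load()),
             args=[ast.Num(42)], keywords=[]))
ast.fix_missing_locations(call)
code = compile(call, "<sketch>", "eval")
print(eval(code, {"delay_mu": lambda dt: dt}))  # -> 42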
@@ -16,9 +16,9 @@ def _run_on_host(k_class, **arguments):
 
 class _Primes(EnvExperiment):
     def build(self):
-        self.attr_device("core")
-        self.attr_argument("output_list")
-        self.attr_argument("maximum")
+        self.setattr_device("core")
+        self.setattr_argument("output_list")
+        self.setattr_argument("maximum")
 
     @kernel
     def run(self):
@@ -36,7 +36,7 @@ class _Primes(EnvExperiment):
 
 class _Misc(EnvExperiment):
     def build(self):
-        self.attr_device("core")
+        self.setattr_device("core")
 
         self.input = 84
         self.al = [1, 2, 3, 4, 5]
@@ -54,9 +54,9 @@ class _Misc(EnvExperiment):
 
 class _PulseLogger(EnvExperiment):
     def build(self):
-        self.attr_device("core")
-        self.attr_argument("output_list")
-        self.attr_argument("name")
+        self.setattr_device("core")
+        self.setattr_argument("output_list")
+        self.setattr_argument("name")
 
     def _append(self, t, l, f):
         if not hasattr(self, "first_timestamp"):
@@ -81,11 +81,11 @@ class _PulseLogger(EnvExperiment):
 
 class _Pulses(EnvExperiment):
     def build(self):
-        self.attr_device("core")
-        self.attr_argument("output_list")
+        self.setattr_device("core")
+        self.setattr_argument("output_list")
 
         for name in "a", "b", "c", "d":
-            pl = _PulseLogger(*self.dbs(),
+            pl = _PulseLogger(*self.managers(),
                               output_list=self.output_list,
                               name=name)
             setattr(self, name, pl)
@@ -108,8 +108,8 @@ class _MyException(Exception):
 
 class _Exceptions(EnvExperiment):
     def build(self):
-        self.attr_device("core")
-        self.attr_argument("trace")
+        self.setattr_device("core")
+        self.setattr_argument("trace")
 
     @kernel
     def run(self):
@@ -152,8 +152,8 @@ class _Exceptions(EnvExperiment):
 
 class _RPCExceptions(EnvExperiment):
     def build(self):
-        self.attr_device("core")
-        self.attr_argument("catch", FreeValue(False))
+        self.setattr_device("core")
+        self.setattr_argument("catch", FreeValue(False))
 
         self.success = False
 
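The renamed helpers behave like the old `attr_*` ones: they look a key up in the corresponding manager and bind the result as an instance attribute of the same name. A toy sketch of `setattr_device` (the manager API here is assumed, not ARTIQ's exact one):

class EnvExperimentSketch:
    """Illustrates the setattr_device naming convention above."""
    def __init__(self, device_mgr):
        self._device_mgr = device_mgr

    def setattr_device(self, key):
        # Fetch the driver and expose it as self.<key>.
        setattr(self, key, self._device_mgr.get(key))

class FakeDeviceMgr:
    def get(self, key):
        return "<driver for {}>".format(key)

exp = EnvExperimentSketch(FakeDeviceMgr())
exp.setattr_device("core")
print(exp.core)  # -> <driver for core>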
@@ -10,8 +10,11 @@ from artiq.coredevice.exceptions import RTIOUnderflow, RTIOSequenceError
 
 class RTT(EnvExperiment):
     def build(self):
-        self.attr_device("core")
-        self.attr_device("ttl_inout")
+        self.setattr_device("core")
+        self.setattr_device("ttl_inout")
+
+    def set_rtt(self, rtt):
+        self.set_dataset("rtt", rtt)
 
     @kernel
     def run(self):
@@ -30,9 +33,12 @@ class RTT(EnvExperiment):
 
 class Loopback(EnvExperiment):
     def build(self):
-        self.attr_device("core")
-        self.attr_device("loop_in")
-        self.attr_device("loop_out")
+        self.setattr_device("core")
+        self.setattr_device("loop_in")
+        self.setattr_device("loop_out")
+
+    def set_rtt(self, rtt):
+        self.set_dataset("rtt", rtt)
 
     @kernel
     def run(self):
@@ -49,9 +55,12 @@ class Loopback(EnvExperiment):
 
 class ClockGeneratorLoopback(EnvExperiment):
     def build(self):
-        self.attr_device("core")
-        self.attr_device("loop_clock_in")
-        self.attr_device("loop_clock_out")
+        self.setattr_device("core")
+        self.setattr_device("loop_clock_in")
+        self.setattr_device("loop_clock_out")
+
+    def set_count(self, count):
+        self.set_dataset("count", count)
 
     @kernel
     def run(self):
@@ -68,8 +77,11 @@ class ClockGeneratorLoopback(EnvExperiment):
 
 class PulseRate(EnvExperiment):
     def build(self):
-        self.attr_device("core")
-        self.attr_device("ttl_out")
+        self.setattr_device("core")
+        self.setattr_device("ttl_out")
+
+    def set_pulse_rate(self, pulse_rate):
+        self.set_dataset("pulse_rate", pulse_rate)
 
     @kernel
     def run(self):
@@ -89,7 +101,7 @@ class PulseRate(EnvExperiment):
 
 class Watchdog(EnvExperiment):
     def build(self):
-        self.attr_device("core")
+        self.setattr_device("core")
 
     @kernel
     def run(self):
@@ -100,9 +112,12 @@ class Watchdog(EnvExperiment):
 
 class LoopbackCount(EnvExperiment):
     def build(self):
-        self.attr_device("core")
-        self.attr_device("ttl_inout")
-        self.attr_argument("npulses")
+        self.setattr_device("core")
+        self.setattr_device("ttl_inout")
+        self.setattr_argument("npulses")
+
+    def set_count(self, count):
+        self.set_dataset("count", count)
 
     @kernel
     def run(self):
@@ -119,8 +134,8 @@ class LoopbackCount(EnvExperiment):
 
 class Underflow(EnvExperiment):
     def build(self):
-        self.attr_device("core")
-        self.attr_device("ttl_out")
+        self.setattr_device("core")
+        self.setattr_device("ttl_out")
 
     @kernel
     def run(self):
@@ -131,8 +146,8 @@ class Underflow(EnvExperiment):
 
 class SequenceError(EnvExperiment):
     def build(self):
-        self.attr_device("core")
-        self.attr_device("ttl_out")
+        self.setattr_device("core")
+        self.setattr_device("ttl_out")
 
     @kernel
     def run(self):
@@ -144,8 +159,8 @@ class SequenceError(EnvExperiment):
 
 class CollisionError(EnvExperiment):
     def build(self):
-        self.attr_device("core")
-        self.attr_device("ttl_out_serdes")
+        self.setattr_device("core")
+        self.setattr_device("ttl_out_serdes")
 
     @kernel
     def run(self):
@@ -157,7 +172,10 @@ class CollisionError(EnvExperiment):
 
 class TimeKeepsRunning(EnvExperiment):
     def build(self):
-        self.attr_device("core")
+        self.setattr_device("core")
+
+    def set_time_at_start(self, time_at_start):
+        self.set_dataset("time_at_start", time_at_start)
 
     @kernel
     def run(self):
@@ -166,40 +184,42 @@ class TimeKeepsRunning(EnvExperiment):
 
 class Handover(EnvExperiment):
     def build(self):
-        self.attr_device("core")
+        self.setattr_device("core")
 
     @kernel
     def get_now(self, var):
         self.set_result(var, now_mu())
 
     def run(self):
-        self.get_now("t1")
-        self.get_now("t2")
+        self.get_now()
+        self.set_dataset("t1", self.time_at_start)
+        self.get_now()
+        self.set_dataset("t2", self.time_at_start)
 
 
 class CoredeviceTest(ExperimentCase):
     def test_rtt(self):
         self.execute(RTT)
-        rtt = self.rdb.get("rtt")
+        rtt = self.dataset_mgr.get("rtt")
         print(rtt)
         self.assertGreater(rtt, 0*ns)
         self.assertLess(rtt, 100*ns)
 
     def test_loopback(self):
         self.execute(Loopback)
-        rtt = self.rdb.get("rtt")
+        rtt = self.dataset_mgr.get("rtt")
         print(rtt)
         self.assertGreater(rtt, 0*ns)
         self.assertLess(rtt, 50*ns)
 
     def test_clock_generator_loopback(self):
         self.execute(ClockGeneratorLoopback)
-        count = self.rdb.get("count")
+        count = self.dataset_mgr.get("count")
         self.assertEqual(count, 10)
 
     def test_pulse_rate(self):
         self.execute(PulseRate)
-        rate = self.rdb.get("pulse_rate")
+        rate = self.dataset_mgr.get("pulse_rate")
         print(rate)
         self.assertGreater(rate, 100*ns)
         self.assertLess(rate, 2500*ns)
@@ -207,7 +227,7 @@ class CoredeviceTest(ExperimentCase):
     def test_loopback_count(self):
         npulses = 2
         self.execute(LoopbackCount, npulses=npulses)
-        count = self.rdb.get("count")
+        count = self.dataset_mgr.get("count")
         self.assertEqual(count, npulses)
 
     def test_underflow(self):
@@ -229,23 +249,24 @@ class CoredeviceTest(ExperimentCase):
 
     def test_time_keeps_running(self):
         self.execute(TimeKeepsRunning)
-        t1 = self.rdb.get("time_at_start")
+        t1 = self.dataset_mgr.get("time_at_start")
         self.execute(TimeKeepsRunning)
-        t2 = self.rdb.get("time_at_start")
-        dead_time = mu_to_seconds(t2 - t1, self.dmgr.get("core"))
+        t2 = self.dataset_mgr.get("time_at_start")
+        dead_time = mu_to_seconds(t2 - t1, self.device_mgr.get("core"))
         print(dead_time)
         self.assertGreater(dead_time, 1*ms)
         self.assertLess(dead_time, 500*ms)
 
     def test_handover(self):
         self.execute(Handover)
-        self.assertEqual(self.rdb.get("t1"), self.rdb.get("t2"))
+        self.assertEqual(self.dataset_mgr.get("t1"),
+                         self.dataset_mgr.get("t2"))
 
 
 class RPCTiming(EnvExperiment):
     def build(self):
-        self.attr_device("core")
-        self.attr_argument("repeats", FreeValue(100))
+        self.setattr_device("core")
+        self.setattr_argument("repeats", FreeValue(100))
 
     def nop(self):
         pass
@@ -262,14 +283,14 @@ class RPCTiming(EnvExperiment):
         self.ts = [0. for _ in range(self.repeats)]
         self.bench()
         mean = sum(self.ts)/self.repeats
-        self.set_result("rpc_time_stddev", sqrt(
+        self.set_dataset("rpc_time_stddev", sqrt(
|
||||||
sum([(t - mean)**2 for t in self.ts])/self.repeats))
|
sum([(t - mean)**2 for t in self.ts])/self.repeats))
|
||||||
self.set_result("rpc_time_mean", mean)
|
self.set_dataset("rpc_time_mean", mean)
|
||||||
|
|
||||||
|
|
||||||
class RPCTest(ExperimentCase):
|
class RPCTest(ExperimentCase):
|
||||||
def test_rpc_timing(self):
|
def test_rpc_timing(self):
|
||||||
self.execute(RPCTiming)
|
self.execute(RPCTiming)
|
||||||
self.assertGreater(self.rdb.get("rpc_time_mean"), 100*ns)
|
self.assertGreater(self.dataset_mgr.get("rpc_time_mean"), 100*ns)
|
||||||
self.assertLess(self.rdb.get("rpc_time_mean"), 15*ms)
|
self.assertLess(self.dataset_mgr.get("rpc_time_mean"), 15*ms)
|
||||||
self.assertLess(self.rdb.get("rpc_time_stddev"), 1*ms)
|
self.assertLess(self.dataset_mgr.get("rpc_time_stddev"), 1*ms)
|
||||||
|
|
|
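The pattern running through these test diffs: the EnvExperiment helpers are renamed (attr_device/attr_argument become setattr_device/setattr_argument) and results move from the result DB (self.rdb, set_result) to the dataset manager (self.dataset_mgr, set_dataset). A minimal sketch of an experiment written against the new names; the ttl_out device and the 1*us pulse are made-up example values:

from artiq import *


class Blink(EnvExperiment):
    def build(self):
        self.setattr_device("core")       # was: self.attr_device("core")
        self.setattr_device("ttl_out")
        self.setattr_argument("npulses")  # was: self.attr_argument("npulses")

    @kernel
    def run(self):
        for _ in range(self.npulses):
            self.ttl_out.pulse(1*us)

    def analyze(self):
        self.set_dataset("done", True)    # was: self.set_result("done", True)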
@@ -1,3 +1,4 @@
+# Copyright (C) 2015 M-Labs Limited
 # Copyright (C) 2014, 2015 Robert Jordens <jordens@gmail.com>
 
 import os
@@ -6,9 +7,10 @@ import unittest
 import logging
 
 from artiq.language import *
+from artiq.master.databases import DeviceDB, DatasetDB
+from artiq.master.worker_db import DeviceManager, DatasetManager
 from artiq.coredevice.core import CompileError
-from artiq.protocols.file_db import FlatFileDB
-from artiq.master.worker_db import DeviceManager, ResultDB
+from artiq.protocols import pyon
 from artiq.frontend.artiq_run import DummyScheduler
 
 
@@ -19,14 +21,14 @@ logger = logging.getLogger(__name__)
 def get_from_ddb(*path, default="skip"):
     if not artiq_root:
         raise unittest.SkipTest("no ARTIQ_ROOT")
-    v = FlatFileDB(os.path.join(artiq_root, "ddb.pyon")).data
+    v = pyon.load_file(os.path.join(artiq_root, "device_db.pyon"))
     try:
         for p in path:
             v = v[p]
         return v.read
     except KeyError:
         if default == "skip":
-            raise unittest.SkipTest("ddb path {} not found".format(path))
+            raise unittest.SkipTest("device db path {} not found".format(path))
         else:
             return default
 
@@ -34,15 +36,15 @@ def get_from_ddb(*path, default="skip"):
 @unittest.skipUnless(artiq_root, "no ARTIQ_ROOT")
 class ExperimentCase(unittest.TestCase):
     def setUp(self):
-        self.ddb = FlatFileDB(os.path.join(artiq_root, "ddb.pyon"))
-        self.dmgr = DeviceManager(self.ddb,
+        self.device_db = DeviceDB(os.path.join(artiq_root, "device_db.pyon"))
+        self.dataset_db = DatasetDB(os.path.join(artiq_root, "dataset_db.pyon"))
+        self.device_mgr = DeviceManager(self.device_db,
             virtual_devices={"scheduler": DummyScheduler()})
-        self.pdb = FlatFileDB(os.path.join(artiq_root, "pdb.pyon"))
-        self.rdb = ResultDB()
+        self.dataset_mgr = DatasetManager(self.dataset_db)
 
     def create(self, cls, **kwargs):
         try:
-            exp = cls(self.dmgr, self.pdb, self.rdb, **kwargs)
+            exp = cls(self.device_mgr, self.dataset_mgr, **kwargs)
             exp.prepare()
             return exp
         except KeyError as e:
@@ -55,7 +57,7 @@ class ExperimentCase(unittest.TestCase):
             "class_name": cls.__name__,
             "arguments": kwargs
         }
-        self.dmgr.virtual_devices["scheduler"].expid = expid
+        self.device_mgr.virtual_devices["scheduler"].expid = expid
         try:
             exp = self.create(cls, **kwargs)
             exp.run()
@@ -65,4 +67,4 @@ class ExperimentCase(unittest.TestCase):
                 # Reduce amount of text on terminal.
                 raise error from None
         finally:
-            self.dmgr.close_devices()
+            self.device_mgr.close_devices()
@@ -52,23 +52,22 @@ class RPCCase(unittest.TestCase):
     def test_blocking_echo(self):
         self._run_server_and_test(self._blocking_echo)
 
-    @asyncio.coroutine
-    def _asyncio_echo(self):
+    async def _asyncio_echo(self):
         remote = pc_rpc.AsyncioClient()
         for attempt in range(100):
-            yield from asyncio.sleep(.2)
+            await asyncio.sleep(.2)
             try:
-                yield from remote.connect_rpc(test_address, test_port, "test")
+                await remote.connect_rpc(test_address, test_port, "test")
             except ConnectionRefusedError:
                 pass
             else:
                 break
         try:
-            test_object_back = yield from remote.echo(test_object)
+            test_object_back = await remote.echo(test_object)
             self.assertEqual(test_object, test_object_back)
             with self.assertRaises(pc_rpc.RemoteError):
-                yield from remote.non_existing_method()
-            yield from remote.terminate()
+                await remote.non_existing_method()
+            await remote.terminate()
         finally:
             remote.close_rpc()
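The conversion above is the mechanical Python 3.5 migration applied throughout this commit: @asyncio.coroutine generator coroutines using yield from become native async def functions using await. A self-contained before/after sketch of the same coroutine in both styles:

import asyncio


@asyncio.coroutine
def fetch_old():        # pre-3.5 style, as removed above
    yield from asyncio.sleep(.2)
    return 42


async def fetch_new():  # native coroutine, as introduced above
    await asyncio.sleep(.2)
    return 42


loop = asyncio.get_event_loop()
assert loop.run_until_complete(fetch_old()) == loop.run_until_complete(fetch_new())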
@@ -18,12 +18,16 @@ class EmptyExperiment(EnvExperiment):
 
 class BackgroundExperiment(EnvExperiment):
     def build(self):
-        self.attr_device("scheduler")
+        self.setattr_device("scheduler")
 
     def run(self):
-        while True:
-            self.scheduler.pause()
-            sleep(0.2)
+        try:
+            while True:
+                self.scheduler.pause()
+                sleep(0.2)
+        except TerminationRequested:
+            self.set_dataset("termination_ok", True,
+                             broadcast=True, save=False)
 
 
 def _get_expid(name):
@@ -57,11 +61,6 @@ def _get_basic_steps(rid, expid, priority=0, flush=False):
     ]
 
 
-_handlers = {
-    "init_rt_results": lambda description: None
-}
-
-
 class SchedulerCase(unittest.TestCase):
     def setUp(self):
         if os.name == "nt":
@@ -72,7 +71,7 @@ class SchedulerCase(unittest.TestCase):
 
     def test_steps(self):
         loop = self.loop
-        scheduler = Scheduler(0, _handlers, None)
+        scheduler = Scheduler(0, dict(), None)
         expid = _get_expid("EmptyExperiment")
 
         expect = _get_basic_steps(1, expid)
@@ -108,13 +107,27 @@ class SchedulerCase(unittest.TestCase):
 
     def test_pause(self):
         loop = self.loop
-        scheduler = Scheduler(0, _handlers, None)
+
+        termination_ok = False
+        def check_termination(mod):
+            nonlocal termination_ok
+            self.assertEqual(
+                mod,
+                {"action": "setitem", "key": "termination_ok",
+                 "value": (False, True), "path": []})
+            termination_ok = True
+        handlers = {
+            "update_dataset": check_termination
+        }
+        scheduler = Scheduler(0, handlers, None)
+
         expid_bg = _get_expid("BackgroundExperiment")
         expid = _get_expid("EmptyExperiment")
 
         expect = _get_basic_steps(1, expid)
         background_running = asyncio.Event()
-        done = asyncio.Event()
+        empty_completed = asyncio.Event()
+        background_completed = asyncio.Event()
         expect_idx = 0
         def notify(mod):
             nonlocal expect_idx
@@ -123,23 +136,34 @@ class SchedulerCase(unittest.TestCase):
                        "key": "status",
                        "action": "setitem"}:
                 background_running.set()
+            if mod == {"path": [0],
+                       "value": "deleting",
+                       "key": "status",
+                       "action": "setitem"}:
+                background_completed.set()
             if mod["path"] == [1] or (mod["path"] == [] and mod["key"] == 1):
                 self.assertEqual(mod, expect[expect_idx])
                 expect_idx += 1
                 if expect_idx >= len(expect):
-                    done.set()
+                    empty_completed.set()
         scheduler.notifier.publish = notify
 
         scheduler.start()
         scheduler.submit("main", expid_bg, -99, None, False)
         loop.run_until_complete(background_running.wait())
         scheduler.submit("main", expid, 0, None, False)
-        loop.run_until_complete(done.wait())
+        loop.run_until_complete(empty_completed.wait())
 
+        self.assertFalse(termination_ok)
+        scheduler.request_termination(0)
+        loop.run_until_complete(background_completed.wait())
+        self.assertTrue(termination_ok)
+
         loop.run_until_complete(scheduler.stop())
 
     def test_flush(self):
         loop = self.loop
-        scheduler = Scheduler(0, _handlers, None)
+        scheduler = Scheduler(0, dict(), None)
         expid = _get_expid("EmptyExperiment")
 
         expect = _get_basic_steps(1, expid, 1, True)
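For orientation, the mod dictionaries that notify and check_termination pattern-match on are sync_struct modification messages; the shapes exercised by this test, taken from the assertions above (the (False, True) tuple is presumably the (persist, value) pair stored by the dataset DB):

# status change of the schedule entry at key 0:
{"path": [0], "value": "deleting", "key": "status", "action": "setitem"}

# dataset write performed by BackgroundExperiment on termination:
{"action": "setitem", "key": "termination_ok",
 "value": (False, True), "path": []}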
@@ -8,7 +8,6 @@ test_address = "::1"
 test_port = 7777
 
 
-@asyncio.coroutine
 def write_test_data(test_dict):
     test_values = [5, 2.1, None, True, False,
                    {"a": 5, 2: np.linspace(0, 10, 1)},
@@ -30,19 +29,9 @@ def write_test_data(test_dict):
     test_dict["finished"] = True
 
 
-@asyncio.coroutine
-def start_server(publisher_future, test_dict_future):
-    test_dict = sync_struct.Notifier(dict())
-    publisher = sync_struct.Publisher(
-        {"test": test_dict})
-    yield from publisher.start(test_address, test_port)
-    publisher_future.set_result(publisher)
-    test_dict_future.set_result(test_dict)
-
-
 class SyncStructCase(unittest.TestCase):
     def init_test_dict(self, init):
-        self.test_dict = init
+        self.received_dict = init
         return init
 
     def notify(self, mod):
@@ -54,29 +43,27 @@ class SyncStructCase(unittest.TestCase):
         self.loop = asyncio.new_event_loop()
         asyncio.set_event_loop(self.loop)
 
-    def test_recv(self):
-        loop = self.loop
+    async def _do_test_recv(self):
         self.receiving_done = asyncio.Event()
-        publisher = asyncio.Future()
-        test_dict = asyncio.Future()
-        asyncio.async(start_server(publisher, test_dict))
-        loop.run_until_complete(publisher)
-        loop.run_until_complete(test_dict)
 
-        self.publisher = publisher.result()
-        test_dict = test_dict.result()
-        test_vector = dict()
-        loop.run_until_complete(write_test_data(test_vector))
+        test_dict = sync_struct.Notifier(dict())
+        publisher = sync_struct.Publisher({"test": test_dict})
+        await publisher.start(test_address, test_port)
 
-        asyncio.async(write_test_data(test_dict))
-        self.subscriber = sync_struct.Subscriber("test", self.init_test_dict,
-                                                 self.notify)
-        loop.run_until_complete(self.subscriber.connect(test_address,
-                                                        test_port))
-        loop.run_until_complete(self.receiving_done.wait())
-        self.assertEqual(self.test_dict, test_vector)
-        self.loop.run_until_complete(self.subscriber.close())
-        self.loop.run_until_complete(self.publisher.stop())
+        subscriber = sync_struct.Subscriber("test", self.init_test_dict,
+                                            self.notify)
+        await subscriber.connect(test_address, test_port)
+
+        write_test_data(test_dict)
+        await self.receiving_done.wait()
+
+        await subscriber.close()
+        await publisher.stop()
+
+        self.assertEqual(self.received_dict, test_dict.read)
+
+    def test_recv(self):
+        self.loop.run_until_complete(self._do_test_recv())
 
     def tearDown(self):
         self.loop.close()
@@ -8,6 +8,22 @@ from artiq import *
 from artiq.master.worker import *
 
 
+class SimpleExperiment(EnvExperiment):
+    def build(self):
+        pass
+
+    def run(self):
+        pass
+
+
+class ExceptionTermination(EnvExperiment):
+    def build(self):
+        pass
+
+    def run(self):
+        raise TypeError
+
+
 class WatchdogNoTimeout(EnvExperiment):
     def build(self):
         pass
@@ -36,15 +52,14 @@ class WatchdogTimeoutInBuild(EnvExperiment):
         pass
 
 
-@asyncio.coroutine
-def _call_worker(worker, expid):
+async def _call_worker(worker, expid):
     try:
-        yield from worker.build(0, "main", None, expid, 0)
-        yield from worker.prepare()
-        yield from worker.run()
-        yield from worker.analyze()
+        await worker.build(0, "main", None, expid, 0)
+        await worker.prepare()
+        await worker.run()
+        await worker.analyze()
     finally:
-        yield from worker.close()
+        await worker.close()
 
 
 def _run_experiment(class_name):
@@ -54,11 +69,11 @@ def _run_experiment(class_name):
         "arguments": dict()
     }
     loop = asyncio.get_event_loop()
-    worker = Worker()
+    worker = Worker(handlers={"log": lambda message: None})
     loop.run_until_complete(_call_worker(worker, expid))
 
 
-class WatchdogCase(unittest.TestCase):
+class WorkerCase(unittest.TestCase):
     def setUp(self):
         if os.name == "nt":
             self.loop = asyncio.ProactorEventLoop()
@@ -66,6 +81,13 @@ class WatchdogCase(unittest.TestCase):
             self.loop = asyncio.new_event_loop()
         asyncio.set_event_loop(self.loop)
 
+    def test_simple_run(self):
+        _run_experiment("SimpleExperiment")
+
+    def test_exception(self):
+        with self.assertRaises(WorkerException):
+            _run_experiment("ExceptionTermination")
+
     def test_watchdog_no_timeout(self):
         _run_experiment("WatchdogNoTimeout")
@@ -8,6 +8,8 @@ import time
 import collections
 import os.path
 
+import numpy as np
+
 from artiq.language.environment import is_experiment
 from artiq.protocols import pyon
 
@@ -23,6 +25,41 @@ def parse_arguments(arguments):
     return d
 
 
+def elide(s, maxlen):
+    elided = False
+    if len(s) > maxlen:
+        s = s[:maxlen]
+        elided = True
+    try:
+        idx = s.index("\n")
+    except ValueError:
+        pass
+    else:
+        s = s[:idx]
+        elided = True
+    if elided:
+        maxlen -= 3
+        if len(s) > maxlen:
+            s = s[:maxlen]
+        s += "..."
+    return s
+
+
+def short_format(v):
+    if v is None:
+        return "None"
+    t = type(v)
+    if np.issubdtype(t, int) or np.issubdtype(t, float):
+        return str(v)
+    elif t is str:
+        return "\"" + elide(v, 15) + "\""
+    else:
+        r = t.__name__
+        if t is list or t is dict or t is set:
+            r += " ({})".format(len(v))
+        return r
+
+
 def file_import(filename, prefix="file_import_"):
     linecache.checkcache(filename)
 
@@ -79,68 +116,41 @@ def init_logger(args):
     logging.basicConfig(level=logging.WARNING + args.quiet*10 - args.verbose*10)
 
 
-@asyncio.coroutine
-def exc_to_warning(coro):
+async def exc_to_warning(coro):
     try:
-        yield from coro
+        await coro
    except:
         logger.warning("asyncio coroutine terminated with exception",
                        exc_info=True)
 
 
-@asyncio.coroutine
-def asyncio_process_wait_timeout(process, timeout):
-    # In Python < 3.5, asyncio.wait_for(process.wait(), ...
-    # causes a futures.InvalidStateError inside asyncio if and when the
-    # process terminates after the timeout.
-    # Work around this problem.
-    @asyncio.coroutine
-    def process_wait_returncode_timeout():
-        while True:
-            if process.returncode is not None:
-                break
-            yield from asyncio.sleep(0.1)
-    yield from asyncio.wait_for(process_wait_returncode_timeout(),
-                                timeout=timeout)
-
-
-@asyncio.coroutine
-def asyncio_process_wait(process):
-    r = True
-    while r:
-        f, p = yield from asyncio.wait([process.stdout.read(1024)])
-        r = f.pop().result()
-
-
-@asyncio.coroutine
-def asyncio_wait_or_cancel(fs, **kwargs):
-    fs = [asyncio.async(f) for f in fs]
+async def asyncio_wait_or_cancel(fs, **kwargs):
+    fs = [asyncio.ensure_future(f) for f in fs]
     try:
-        d, p = yield from asyncio.wait(fs, **kwargs)
+        d, p = await asyncio.wait(fs, **kwargs)
     except:
         for f in fs:
             f.cancel()
         raise
     for f in p:
         f.cancel()
-        yield from asyncio.wait([f])
+        await asyncio.wait([f])
     return fs
 
 
 class TaskObject:
     def start(self):
-        self.task = asyncio.async(self._do())
+        self.task = asyncio.ensure_future(self._do())
 
-    @asyncio.coroutine
-    def stop(self):
+    async def stop(self):
         self.task.cancel()
         try:
-            yield from asyncio.wait_for(self.task, None)
+            await asyncio.wait_for(self.task, None)
         except asyncio.CancelledError:
             pass
         del self.task
 
-    @asyncio.coroutine
-    def _do(self):
+    async def _do(self):
         raise NotImplementedError
 
 
@@ -152,13 +162,12 @@ class Condition:
         self._loop = asyncio.get_event_loop()
         self._waiters = collections.deque()
 
-    @asyncio.coroutine
-    def wait(self):
+    async def wait(self):
         """Wait until notified."""
         fut = asyncio.Future(loop=self._loop)
         self._waiters.append(fut)
         try:
-            yield from fut
+            await fut
         finally:
             self._waiters.remove(fut)
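A quick sketch of what the new string helpers produce; the module path artiq.tools is assumed, and the expected outputs are derived by reading the code above:

from artiq.tools import elide, short_format  # module path assumed

print(elide("abcdefghij", 8))   # 'abcde...' (truncated; '...' counted in the budget)
print(elide("ab\ncd", 10))      # 'ab...'    (cut at the first newline)
print(short_format(None))       # 'None'
print(short_format(3.25))       # '3.25'
print(short_format("a long string value"))  # '"a long strin..."'
print(short_format([1, 2, 3]))  # 'list (3)'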
@@ -0,0 +1,62 @@
+"""
+This transform implements time management functions (delay_mu/now_mu/at_mu)
+using an accumulator 'now' and simple replacement rules:
+
+    delay_mu(t) -> now += t
+    now_mu() -> now
+    at_mu(t) -> now = t
+
+The function delay(), that uses seconds, must be lowered to delay_mu() before
+invoking this transform.
+The accumulator is initialized to an int64 value at the beginning of the
+output function.
+"""
+
+import ast
+
+
+class _TimeLowerer(ast.NodeTransformer):
+    def visit_Call(self, node):
+        if node.func.id == "now_mu":
+            return ast.copy_location(ast.Name("now", ast.Load()), node)
+        else:
+            self.generic_visit(node)
+            return node
+
+    def visit_Expr(self, node):
+        r = node
+        if isinstance(node.value, ast.Call):
+            funcname = node.value.func.id
+            if funcname == "delay_mu":
+                r = ast.copy_location(
+                    ast.AugAssign(target=ast.Name("now", ast.Store()),
+                                  op=ast.Add(),
+                                  value=node.value.args[0]),
+                    node)
+            elif funcname == "at_mu":
+                r = ast.copy_location(
+                    ast.Assign(targets=[ast.Name("now", ast.Store())],
+                               value=node.value.args[0]),
+                    node)
+        self.generic_visit(r)
+        return r
+
+
+def lower_time(func_def):
+    _TimeLowerer().visit(func_def)
+    call_init = ast.Call(
+        func=ast.Name("syscall", ast.Load()),
+        args=[ast.Str("now_init")], keywords=[])
+    stmt_init = ast.Assign(targets=[ast.Name("now", ast.Store())],
+                           value=call_init)
+    call_save = ast.Call(
+        func=ast.Name("syscall", ast.Load()),
+        args=[ast.Str("now_save"), ast.Name("now", ast.Load())], keywords=[])
+    stmt_save = ast.Expr(call_save)
+    func_def.body = [
+        stmt_init,
+        ast.Try(body=func_def.body,
+                handlers=[],
+                orelse=[],
+                finalbody=[stmt_save])
+    ]
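A sketch of driving the lowering pass by hand; the module path artiq.transforms.lower_time and the sample kernel f are assumptions for illustration:

import ast

from artiq.transforms.lower_time import lower_time  # module path assumed

func_def = ast.parse(
    "def f():\n"
    "    at_mu(1000)\n"
    "    delay_mu(100)\n"
    "    return now_mu()\n").body[0]

lower_time(func_def)
# func_def's body is now equivalent to:
#     now = syscall("now_init")
#     try:
#         now = 1000    # at_mu(1000)
#         now += 100    # delay_mu(100)
#         return now    # now_mu()
#     finally:
#         syscall("now_save", now)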
@@ -0,0 +1,109 @@
+"""
+This transform turns calls to delay() that use non-integer time
+expressed in seconds into calls to delay_mu() that use int64 time
+expressed in multiples of ref_period.
+
+It does so by inserting multiplication/division/rounding operations around
+those calls.
+
+The seconds_to_mu and mu_to_seconds core language functions are also
+implemented here, as well as watchdog to syscall conversion.
+"""
+
+import ast
+
+from artiq.transforms.tools import value_to_ast
+
+
+def _seconds_to_mu(ref_period, node):
+    divided = ast.copy_location(
+        ast.BinOp(left=node,
+                  op=ast.Div(),
+                  right=value_to_ast(ref_period)),
+        node)
+    return ast.copy_location(
+        ast.Call(func=ast.Name("round64", ast.Load()),
+                 args=[divided], keywords=[]),
+        divided)
+
+
+def _mu_to_seconds(ref_period, node):
+    return ast.copy_location(
+        ast.BinOp(left=node,
+                  op=ast.Mult(),
+                  right=value_to_ast(ref_period)),
+        node)
+
+
+class _TimeQuantizer(ast.NodeTransformer):
+    def __init__(self, ref_period):
+        self.ref_period = ref_period
+        self.watchdog_id_counter = 0
+
+    def visit_Call(self, node):
+        funcname = node.func.id
+        if funcname == "delay":
+            node.func.id = "delay_mu"
+            if (isinstance(node.args[0], ast.Call)
+                    and node.args[0].func.id == "mu_to_seconds"):
+                # optimize:
+                # delay(mu_to_seconds(x)) -> delay_mu(x)
+                node.args[0] = self.visit(node.args[0].args[0])
+            else:
+                node.args[0] = _seconds_to_mu(self.ref_period,
+                                              self.visit(node.args[0]))
+            return node
+        elif funcname == "seconds_to_mu":
+            return _seconds_to_mu(self.ref_period,
+                                  self.visit(node.args[0]))
+        elif funcname == "mu_to_seconds":
+            return _mu_to_seconds(self.ref_period,
+                                  self.visit(node.args[0]))
+        else:
+            self.generic_visit(node)
+            return node
+
+    def visit_With(self, node):
+        self.generic_visit(node)
+        if (isinstance(node.items[0].context_expr, ast.Call)
+                and node.items[0].context_expr.func.id == "watchdog"):
+
+            idname = "__watchdog_id_" + str(self.watchdog_id_counter)
+            self.watchdog_id_counter += 1
+
+            time = ast.BinOp(left=node.items[0].context_expr.args[0],
+                             op=ast.Mult(),
+                             right=ast.Num(1000))
+            time_int = ast.Call(
+                func=ast.Name("round", ast.Load()),
+                args=[time], keywords=[])
+            syscall_set = ast.Call(
+                func=ast.Name("syscall", ast.Load()),
+                args=[ast.Str("watchdog_set"), time_int], keywords=[])
+            stmt_set = ast.copy_location(
+                ast.Assign(targets=[ast.Name(idname, ast.Store())],
+                           value=syscall_set),
+                node)
+
+            syscall_clear = ast.Call(
+                func=ast.Name("syscall", ast.Load()),
+                args=[ast.Str("watchdog_clear"),
+                      ast.Name(idname, ast.Load())], keywords=[])
+            stmt_clear = ast.copy_location(ast.Expr(syscall_clear), node)
+
+            node.items[0] = ast.withitem(
+                context_expr=ast.Name(id="sequential",
+                                      ctx=ast.Load()),
+                optional_vars=None)
+            node.body = [
+                stmt_set,
+                ast.Try(body=node.body,
+                        handlers=[],
+                        orelse=[],
+                        finalbody=[stmt_clear])
+            ]
+        return node
+
+
+def quantize_time(func_def, ref_period):
+    _TimeQuantizer(ref_period).visit(func_def)
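A sketch of the source-level effect of the quantizer; the module paths and the sample kernel are assumptions for illustration:

import ast

from artiq.transforms.quantize_time import quantize_time  # module path assumed
from artiq.transforms.unparse import unparse              # module path assumed

func_def = ast.parse(
    "def f():\n"
    "    delay(1.5e-6)\n"
    "    with watchdog(2.0):\n"
    "        delay(mu_to_seconds(100))\n").body[0]

quantize_time(func_def, 1e-9)  # ref_period = 1 ns
print(unparse(func_def))
# delay(1.5e-6)             becomes  delay_mu(round64((1.5e-06 / 1e-09)))
# delay(mu_to_seconds(100)) becomes  delay_mu(100)   (round-trip elided)
# the watchdog block becomes a sequential block bracketed by
# watchdog_set/watchdog_clear syscalls in a try/finally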
@@ -0,0 +1,139 @@
+import ast
+from fractions import Fraction
+
+from artiq.language import core as core_language
+from artiq.language import units
+
+
+embeddable_funcs = (
+    core_language.delay_mu, core_language.at_mu, core_language.now_mu,
+    core_language.delay,
+    core_language.seconds_to_mu, core_language.mu_to_seconds,
+    core_language.syscall, core_language.watchdog,
+    range, bool, int, float, round, len,
+    core_language.int64, core_language.round64,
+    Fraction, core_language.EncodedException
+)
+embeddable_func_names = {func.__name__ for func in embeddable_funcs}
+
+
+def is_embeddable(func):
+    for ef in embeddable_funcs:
+        if func is ef:
+            return True
+    return False
+
+
+def eval_ast(expr, symdict=dict()):
+    if not isinstance(expr, ast.Expression):
+        expr = ast.copy_location(ast.Expression(expr), expr)
+    ast.fix_missing_locations(expr)
+    code = compile(expr, "<ast>", "eval")
+    return eval(code, symdict)
+
+
+class NotASTRepresentable(Exception):
+    pass
+
+
+def value_to_ast(value):
+    if isinstance(value, core_language.int64):  # must be before int
+        return ast.Call(
+            func=ast.Name("int64", ast.Load()),
+            args=[ast.Num(int(value))], keywords=[])
+    elif isinstance(value, bool) or value is None:
+        # must also be before int
+        # isinstance(True/False, int) == True
+        return ast.NameConstant(value)
+    elif isinstance(value, (int, float)):
+        return ast.Num(value)
+    elif isinstance(value, Fraction):
+        return ast.Call(
+            func=ast.Name("Fraction", ast.Load()),
+            args=[ast.Num(value.numerator), ast.Num(value.denominator)], keywords=[])
+    elif isinstance(value, str):
+        return ast.Str(value)
+    elif isinstance(value, list):
+        elts = [value_to_ast(elt) for elt in value]
+        return ast.List(elts, ast.Load())
+    else:
+        for kg in core_language.kernel_globals:
+            if value is getattr(core_language, kg):
+                return ast.Name(kg, ast.Load())
+        raise NotASTRepresentable(str(value))
+
+
+class NotConstant(Exception):
+    pass
+
+
+def eval_constant(node):
+    if isinstance(node, ast.Num):
+        return node.n
+    elif isinstance(node, ast.Str):
+        return node.s
+    elif isinstance(node, ast.NameConstant):
+        return node.value
+    elif isinstance(node, ast.Call):
+        funcname = node.func.id
+        if funcname == "int64":
+            return core_language.int64(eval_constant(node.args[0]))
+        elif funcname == "Fraction":
+            numerator = eval_constant(node.args[0])
+            denominator = eval_constant(node.args[1])
+            return Fraction(numerator, denominator)
+        else:
+            raise NotConstant
+    else:
+        raise NotConstant
+
+
+_replaceable_funcs = {
+    "bool", "int", "float", "round",
+    "int64", "round64", "Fraction",
+    "seconds_to_mu", "mu_to_seconds"
+}
+
+
+def _is_ref_transparent(dependencies, expr):
+    if isinstance(expr, (ast.NameConstant, ast.Num, ast.Str)):
+        return True
+    elif isinstance(expr, ast.Name):
+        dependencies.add(expr.id)
+        return True
+    elif isinstance(expr, ast.UnaryOp):
+        return _is_ref_transparent(dependencies, expr.operand)
+    elif isinstance(expr, ast.BinOp):
+        return (_is_ref_transparent(dependencies, expr.left)
+                and _is_ref_transparent(dependencies, expr.right))
+    elif isinstance(expr, ast.BoolOp):
+        return all(_is_ref_transparent(dependencies, v) for v in expr.values)
+    elif isinstance(expr, ast.Call):
+        return (expr.func.id in _replaceable_funcs and
+                all(_is_ref_transparent(dependencies, arg)
+                    for arg in expr.args))
+    else:
+        return False
+
+
+def is_ref_transparent(expr):
+    dependencies = set()
+    if _is_ref_transparent(dependencies, expr):
+        return True, dependencies
+    else:
+        return False, None
+
+
+class _NodeCounter(ast.NodeVisitor):
+    def __init__(self):
+        self.count = 0
+
+    def generic_visit(self, node):
+        self.count += 1
+        ast.NodeVisitor.generic_visit(self, node)
+
+
+def count_all_nodes(node):
+    nc = _NodeCounter()
+    nc.visit(node)
+    return nc.count
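A sketch of the value_to_ast/eval_ast round-trip and of the referential-transparency check; the module path is assumed:

import ast
from fractions import Fraction

from artiq.transforms.tools import (value_to_ast, eval_ast,
                                    is_ref_transparent)  # module path assumed

node = value_to_ast([1, 2.5, Fraction(1, 3), "x"])
print(eval_ast(node, {"Fraction": Fraction}))  # [1, 2.5, Fraction(1, 3), 'x']

ok, deps = is_ref_transparent(ast.parse("a + round(b)", mode="eval").body)
print(ok, deps)  # True {'a', 'b'} (set order may vary)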
@@ -0,0 +1,575 @@
+import sys
+import ast
+
+
+# Large float and imaginary literals get turned into infinities in the AST.
+# We unparse those infinities to INFSTR.
+INFSTR = "1e" + repr(sys.float_info.max_10_exp + 1)
+
+
+def _interleave(inter, f, seq):
+    """Call f on each item in seq, calling inter() in between.
+    """
+    seq = iter(seq)
+    try:
+        f(next(seq))
+    except StopIteration:
+        pass
+    else:
+        for x in seq:
+            inter()
+            f(x)
+
+
+class _Unparser:
+    """Methods in this class recursively traverse an AST and
+    output source code for the abstract syntax; original formatting
+    is disregarded. """
+
+    def __init__(self, tree):
+        """Print the source for tree to the "result" string."""
+        self.result = ""
+        self._indent = 0
+        self.dispatch(tree)
+        self.result += "\n"
+
+    def fill(self, text=""):
+        "Indent a piece of text, according to the current indentation level"
+        self.result += "\n"+"    "*self._indent + text
+
+    def write(self, text):
+        "Append a piece of text to the current line."
+        self.result += text
+
+    def enter(self):
+        "Print ':', and increase the indentation."
+        self.write(":")
+        self._indent += 1
+
+    def leave(self):
+        "Decrease the indentation level."
+        self._indent -= 1
+
+    def dispatch(self, tree):
+        "Dispatcher function, dispatching tree type T to method _T."
+        if isinstance(tree, list):
+            for t in tree:
+                self.dispatch(t)
+            return
+        meth = getattr(self, "_"+tree.__class__.__name__)
+        meth(tree)
+
+    # Unparsing methods
+    #
+    # There should be one method per concrete grammar type
+    # Constructors should be grouped by sum type. Ideally,
+    # this would follow the order in the grammar, but
+    # currently doesn't.
+
+    def _Module(self, tree):
+        for stmt in tree.body:
+            self.dispatch(stmt)
+
+    # stmt
+    def _Expr(self, tree):
+        self.fill()
+        self.dispatch(tree.value)
+
+    def _Import(self, t):
+        self.fill("import ")
+        _interleave(lambda: self.write(", "), self.dispatch, t.names)
+
+    def _ImportFrom(self, t):
+        self.fill("from ")
+        self.write("." * t.level)
+        if t.module:
+            self.write(t.module)
+        self.write(" import ")
+        _interleave(lambda: self.write(", "), self.dispatch, t.names)
+
+    def _Assign(self, t):
+        self.fill()
+        for target in t.targets:
+            self.dispatch(target)
+            self.write(" = ")
+        self.dispatch(t.value)
+
+    def _AugAssign(self, t):
+        self.fill()
+        self.dispatch(t.target)
+        self.write(" "+self.binop[t.op.__class__.__name__]+"= ")
+        self.dispatch(t.value)
+
+    def _Return(self, t):
+        self.fill("return")
+        if t.value:
+            self.write(" ")
+            self.dispatch(t.value)
+
+    def _Pass(self, t):
+        self.fill("pass")
+
+    def _Break(self, t):
+        self.fill("break")
+
+    def _Continue(self, t):
+        self.fill("continue")
+
+    def _Delete(self, t):
+        self.fill("del ")
+        _interleave(lambda: self.write(", "), self.dispatch, t.targets)
+
+    def _Assert(self, t):
+        self.fill("assert ")
+        self.dispatch(t.test)
+        if t.msg:
+            self.write(", ")
+            self.dispatch(t.msg)
+
+    def _Global(self, t):
+        self.fill("global ")
+        _interleave(lambda: self.write(", "), self.write, t.names)
+
+    def _Nonlocal(self, t):
+        self.fill("nonlocal ")
+        _interleave(lambda: self.write(", "), self.write, t.names)
+
+    def _Yield(self, t):
+        self.write("(")
+        self.write("yield")
+        if t.value:
+            self.write(" ")
+            self.dispatch(t.value)
+        self.write(")")
+
+    def _YieldFrom(self, t):
+        self.write("(")
+        self.write("yield from")
+        if t.value:
+            self.write(" ")
+            self.dispatch(t.value)
+        self.write(")")
+
+    def _Raise(self, t):
+        self.fill("raise")
+        if not t.exc:
+            assert not t.cause
+            return
+        self.write(" ")
+        self.dispatch(t.exc)
+        if t.cause:
+            self.write(" from ")
+            self.dispatch(t.cause)
+
+    def _Try(self, t):
+        self.fill("try")
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+        for ex in t.handlers:
+            self.dispatch(ex)
+        if t.orelse:
+            self.fill("else")
+            self.enter()
+            self.dispatch(t.orelse)
+            self.leave()
+        if t.finalbody:
+            self.fill("finally")
+            self.enter()
+            self.dispatch(t.finalbody)
+            self.leave()
+
+    def _ExceptHandler(self, t):
+        self.fill("except")
+        if t.type:
+            self.write(" ")
+            self.dispatch(t.type)
+        if t.name:
+            self.write(" as ")
+            self.write(t.name)
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+
+    def _ClassDef(self, t):
+        self.write("\n")
+        for deco in t.decorator_list:
+            self.fill("@")
+            self.dispatch(deco)
+        self.fill("class "+t.name)
+        self.write("(")
+        comma = False
+        for e in t.bases:
+            if comma:
+                self.write(", ")
+            else:
+                comma = True
+            self.dispatch(e)
+        for e in t.keywords:
+            if comma:
+                self.write(", ")
+            else:
+                comma = True
+            self.dispatch(e)
+        self.write(")")
+
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+
+    def _FunctionDef(self, t):
+        self.write("\n")
+        for deco in t.decorator_list:
+            self.fill("@")
+            self.dispatch(deco)
+        self.fill("def "+t.name + "(")
+        self.dispatch(t.args)
+        self.write(")")
+        if t.returns:
+            self.write(" -> ")
+            self.dispatch(t.returns)
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+
+    def _For(self, t):
+        self.fill("for ")
+        self.dispatch(t.target)
+        self.write(" in ")
+        self.dispatch(t.iter)
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+        if t.orelse:
+            self.fill("else")
+            self.enter()
+            self.dispatch(t.orelse)
+            self.leave()
+
+    def _If(self, t):
+        self.fill("if ")
+        self.dispatch(t.test)
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+        # collapse nested ifs into equivalent elifs.
+        while (t.orelse and len(t.orelse) == 1 and
+               isinstance(t.orelse[0], ast.If)):
+            t = t.orelse[0]
+            self.fill("elif ")
+            self.dispatch(t.test)
+            self.enter()
+            self.dispatch(t.body)
+            self.leave()
+        # final else
+        if t.orelse:
+            self.fill("else")
+            self.enter()
+            self.dispatch(t.orelse)
+            self.leave()
+
+    def _While(self, t):
+        self.fill("while ")
+        self.dispatch(t.test)
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+        if t.orelse:
+            self.fill("else")
+            self.enter()
+            self.dispatch(t.orelse)
+            self.leave()
+
+    def _With(self, t):
+        self.fill("with ")
+        _interleave(lambda: self.write(", "), self.dispatch, t.items)
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+
+    # expr
+    def _Bytes(self, t):
+        self.write(repr(t.s))
+
+    def _Str(self, tree):
+        self.write(repr(tree.s))
+
+    def _Name(self, t):
+        self.write(t.id)
+
+    def _NameConstant(self, t):
+        self.write(repr(t.value))
+
+    def _Num(self, t):
+        # Substitute overflowing decimal literal for AST infinities.
+        self.write(repr(t.n).replace("inf", INFSTR))
+
+    def _List(self, t):
+        self.write("[")
+        _interleave(lambda: self.write(", "), self.dispatch, t.elts)
+        self.write("]")
+
+    def _ListComp(self, t):
+        self.write("[")
+        self.dispatch(t.elt)
+        for gen in t.generators:
+            self.dispatch(gen)
+        self.write("]")
+
+    def _GeneratorExp(self, t):
+        self.write("(")
+        self.dispatch(t.elt)
+        for gen in t.generators:
+            self.dispatch(gen)
+        self.write(")")
+
+    def _SetComp(self, t):
+        self.write("{")
+        self.dispatch(t.elt)
+        for gen in t.generators:
+            self.dispatch(gen)
+        self.write("}")
+
+    def _DictComp(self, t):
+        self.write("{")
+        self.dispatch(t.key)
+        self.write(": ")
+        self.dispatch(t.value)
+        for gen in t.generators:
+            self.dispatch(gen)
+        self.write("}")
+
+    def _comprehension(self, t):
+        self.write(" for ")
+        self.dispatch(t.target)
+        self.write(" in ")
+        self.dispatch(t.iter)
+        for if_clause in t.ifs:
+            self.write(" if ")
+            self.dispatch(if_clause)
+
+    def _IfExp(self, t):
+        self.write("(")
+        self.dispatch(t.body)
+        self.write(" if ")
+        self.dispatch(t.test)
+        self.write(" else ")
+        self.dispatch(t.orelse)
+        self.write(")")
+
+    def _Set(self, t):
+        assert(t.elts)  # should be at least one element
+        self.write("{")
+        _interleave(lambda: self.write(", "), self.dispatch, t.elts)
+        self.write("}")
+
+    def _Dict(self, t):
+        self.write("{")
+
+        def write_pair(pair):
+            (k, v) = pair
+            self.dispatch(k)
+            self.write(": ")
+            self.dispatch(v)
+        _interleave(lambda: self.write(", "), write_pair,
+                    zip(t.keys, t.values))
+        self.write("}")
+
+    def _Tuple(self, t):
+        self.write("(")
+        if len(t.elts) == 1:
+            (elt,) = t.elts
+            self.dispatch(elt)
+            self.write(",")
+        else:
+            _interleave(lambda: self.write(", "), self.dispatch, t.elts)
+        self.write(")")
+
+    unop = {"Invert": "~", "Not": "not", "UAdd": "+", "USub": "-"}
+
+    def _UnaryOp(self, t):
+        self.write("(")
+        self.write(self.unop[t.op.__class__.__name__])
+        self.write(" ")
+        self.dispatch(t.operand)
+        self.write(")")
+
+    binop = {"Add": "+", "Sub": "-", "Mult": "*", "Div": "/", "Mod": "%",
+             "LShift": "<<", "RShift": ">>",
+             "BitOr": "|", "BitXor": "^", "BitAnd": "&",
+             "FloorDiv": "//", "Pow": "**"}
+
+    def _BinOp(self, t):
+        self.write("(")
+        self.dispatch(t.left)
+        self.write(" " + self.binop[t.op.__class__.__name__] + " ")
+        self.dispatch(t.right)
+        self.write(")")
+
+    cmpops = {"Eq": "==", "NotEq": "!=",
+              "Lt": "<", "LtE": "<=", "Gt": ">", "GtE": ">=",
+              "Is": "is", "IsNot": "is not", "In": "in", "NotIn": "not in"}
+
+    def _Compare(self, t):
+        self.write("(")
+        self.dispatch(t.left)
+        for o, e in zip(t.ops, t.comparators):
+            self.write(" " + self.cmpops[o.__class__.__name__] + " ")
+            self.dispatch(e)
+        self.write(")")
+
+    boolops = {ast.And: "and", ast.Or: "or"}
+
+    def _BoolOp(self, t):
+        self.write("(")
+        s = " %s " % self.boolops[t.op.__class__]
+        _interleave(lambda: self.write(s), self.dispatch, t.values)
+        self.write(")")
+
+    def _Attribute(self, t):
+        self.dispatch(t.value)
+        # Special case: 3.__abs__() is a syntax error, so if t.value
+        # is an integer literal then we need to either parenthesize
+        # it or add an extra space to get 3 .__abs__().
+        if isinstance(t.value, ast.Num) and isinstance(t.value.n, int):
+            self.write(" ")
+        self.write(".")
+        self.write(t.attr)
+
+    def _Call(self, t):
+        self.dispatch(t.func)
+        self.write("(")
+        comma = False
+        for e in t.args:
+            if comma:
+                self.write(", ")
+            else:
+                comma = True
+            self.dispatch(e)
+        for e in t.keywords:
+            if comma:
+                self.write(", ")
+            else:
+                comma = True
+            self.dispatch(e)
+        self.write(")")
+
+    def _Subscript(self, t):
+        self.dispatch(t.value)
+        self.write("[")
+        self.dispatch(t.slice)
+        self.write("]")
+
+    def _Starred(self, t):
+        self.write("*")
+        self.dispatch(t.value)
+
+    # slice
+    def _Ellipsis(self, t):
+        self.write("...")
+
+    def _Index(self, t):
+        self.dispatch(t.value)
+
+    def _Slice(self, t):
+        if t.lower:
+            self.dispatch(t.lower)
+        self.write(":")
+        if t.upper:
+            self.dispatch(t.upper)
+        if t.step:
+            self.write(":")
+            self.dispatch(t.step)
+
+    def _ExtSlice(self, t):
+        _interleave(lambda: self.write(', '), self.dispatch, t.dims)
+
+    # argument
+    def _arg(self, t):
+        self.write(t.arg)
+        if t.annotation:
+            self.write(": ")
+            self.dispatch(t.annotation)
+
+    # others
+    def _arguments(self, t):
+        first = True
+        # normal arguments
+        defaults = [None] * (len(t.args) - len(t.defaults)) + t.defaults
+        for a, d in zip(t.args, defaults):
+            if first:
+                first = False
+            else:
+                self.write(", ")
+            self.dispatch(a)
+            if d:
+                self.write("=")
+                self.dispatch(d)
+
+        # varargs, or bare '*' if no varargs but keyword-only arguments present
+        if t.vararg or t.kwonlyargs:
+            if first:
+                first = False
+            else:
+                self.write(", ")
+            self.write("*")
+            if t.vararg:
+                self.write(t.vararg.arg)
+                if t.vararg.annotation:
+                    self.write(": ")
+                    self.dispatch(t.vararg.annotation)
+
+        # keyword-only arguments
+        if t.kwonlyargs:
+            for a, d in zip(t.kwonlyargs, t.kw_defaults):
+                if first:
+                    first = False
+                else:
+                    self.write(", ")
+                self.dispatch(a),
+                if d:
+                    self.write("=")
+                    self.dispatch(d)
+
+        # kwargs
+        if t.kwarg:
+            if first:
+                first = False
+            else:
+                self.write(", ")
+            self.write("**"+t.kwarg.arg)
+            if t.kwarg.annotation:
+                self.write(": ")
+                self.dispatch(t.kwarg.annotation)
+
+    def _keyword(self, t):
+        if t.arg is None:
+            self.write("**")
+        else:
+            self.write(t.arg)
+            self.write("=")
+        self.dispatch(t.value)
+
+    def _Lambda(self, t):
+        self.write("(")
+        self.write("lambda ")
+        self.dispatch(t.args)
+        self.write(": ")
+        self.dispatch(t.body)
+        self.write(")")
+
+    def _alias(self, t):
+        self.write(t.name)
+        if t.asname:
+            self.write(" as "+t.asname)
+
+    def _withitem(self, t):
+        self.dispatch(t.context_expr)
+        if t.optional_vars:
+            self.write(" as ")
+            self.dispatch(t.optional_vars)
+
+
+def unparse(tree):
+    unparser = _Unparser(tree)
+    return unparser.result
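A round-trip sketch of the unparser; the module path is assumed:

import ast

from artiq.transforms.unparse import unparse  # module path assumed

tree = ast.parse("x = 1\n"
                 "if x > 0:\n"
                 "    print(x + 2)\n")
print(unparse(tree))
# Regenerates the module from the AST with fully explicit parentheses:
#
# x = 1
# if (x > 0):
#     print((x + 2))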
@@ -0,0 +1,18 @@
+Uploading conda packages (Python 3.5)
+=====================================
+
+Preparing:
+
+1. [Install miniconda][miniconda]
+2. `conda update -q conda`
+3. `conda install conda-build`
+4. `conda create -q -n py35 python=3.5`
+5. `conda config --add channels https://conda.anaconda.org/m-labs/channel/dev`
+
+Building:
+
+1. `source activate py35`
+2. `conda build pkgname --python 3.5`; this command displays a path to the freshly built package
+3. `anaconda upload <package> -c main -c dev`
+
+[miniconda]: http://conda.pydata.org/docs/install/quick.html#linux-miniconda-install
@@ -11,7 +11,7 @@ $PYTHON setup.py install --single-version-externally-managed --record=record.txt
 git clone --recursive https://github.com/m-labs/misoc
 export MSCDIR=$SRC_DIR/misoc
 
-ARTIQ_PREFIX=$PREFIX/lib/python3.4/site-packages/artiq
+ARTIQ_PREFIX=$PREFIX/lib/python3.5/site-packages/artiq
 BIN_PREFIX=$ARTIQ_PREFIX/binaries/
 mkdir -p $ARTIQ_PREFIX/misc
 mkdir -p $BIN_PREFIX/kc705 $BIN_PREFIX/pipistrello
@@ -20,14 +20,14 @@ mkdir -p $BIN_PREFIX/kc705 $BIN_PREFIX/pipistrello
 
 cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_kc705 build-headers build-bios; cd -
 make -C soc/runtime clean runtime.fbi
-cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_kc705 $MISOC_EXTRA_VIVADO_CMDLINE build-bitstream; cd -
+[ "$BUILD_SOC" != "0" ] && (cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_kc705 $MISOC_EXTRA_VIVADO_CMDLINE build-bitstream)
 
 # install KC705 NIST_QC1 binaries
 
 mkdir -p $BIN_PREFIX/kc705/nist_qc1
 cp soc/runtime/runtime.fbi $BIN_PREFIX/kc705/nist_qc1/
 cp $SRC_DIR/misoc/software/bios/bios.bin $BIN_PREFIX/kc705/
-cp $SRC_DIR/misoc/build/artiq_kc705-nist_qc1-kc705.bit $BIN_PREFIX/kc705/
+[ "$BUILD_SOC" != "0" ] && cp $SRC_DIR/misoc/build/artiq_kc705-nist_qc1-kc705.bit $BIN_PREFIX/kc705/
 wget http://sionneau.net/artiq/binaries/kc705/flash_proxy/bscan_spi_kc705.bit
 mv bscan_spi_kc705.bit $BIN_PREFIX/kc705/
 
@@ -35,13 +35,13 @@ mv bscan_spi_kc705.bit $BIN_PREFIX/kc705/
 
 cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_pipistrello build-headers build-bios; cd -
 make -C soc/runtime clean runtime.fbi
-cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_pipistrello $MISOC_EXTRA_ISE_CMDLINE build-bitstream; cd -
+[ "$BUILD_SOC" != "0" ] && (cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_pipistrello $MISOC_EXTRA_ISE_CMDLINE build-bitstream)
 
 # install Pipistrello binaries
 
 cp soc/runtime/runtime.fbi $BIN_PREFIX/pipistrello/
 cp $SRC_DIR/misoc/software/bios/bios.bin $BIN_PREFIX/pipistrello/
-cp $SRC_DIR/misoc/build/artiq_pipistrello-nist_qc1-pipistrello.bit $BIN_PREFIX/pipistrello/
+[ "$BUILD_SOC" != "0" ] && cp $SRC_DIR/misoc/build/artiq_pipistrello-nist_qc1-pipistrello.bit $BIN_PREFIX/pipistrello/
 wget https://people.phys.ethz.ch/~robertjo/bscan_spi_lx45_csg324.bit
 mv bscan_spi_lx45_csg324.bit $BIN_PREFIX/pipistrello/
 
@@ -49,13 +49,13 @@ mv bscan_spi_lx45_csg324.bit $BIN_PREFIX/pipistrello/
 
 cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_kc705 -s NIST_QC2 build-headers; cd -
 make -C soc/runtime clean runtime.fbi
-cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_kc705 -s NIST_QC2 $MISOC_EXTRA_VIVADO_CMDLINE build-bitstream; cd -
+[ "$BUILD_SOC" != "0" ] && (cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_kc705 -s NIST_QC2 $MISOC_EXTRA_VIVADO_CMDLINE build-bitstream)
 
 # install KC705 NIST_QC2 binaries
 
 mkdir -p $BIN_PREFIX/kc705/nist_qc2
 cp soc/runtime/runtime.fbi $BIN_PREFIX/kc705/nist_qc2/
-cp $SRC_DIR/misoc/build/artiq_kc705-nist_qc2-kc705.bit $BIN_PREFIX/kc705/
+[ "$BUILD_SOC" != "0" ] && cp $SRC_DIR/misoc/build/artiq_kc705-nist_qc2-kc705.bit $BIN_PREFIX/kc705/
 
 cp artiq/frontend/artiq_flash.sh $PREFIX/bin
@@ -28,14 +28,14 @@ build:

 requirements:
   build:
-    - python >=3.4.3
+    - python >=3.5.0
     - setuptools
     - numpy
-    - migen
+    - migen 0.0
     - pyelftools
     - binutils-or1k-linux
   run:
-    - python >=3.4.3
+    - python >=3.5.0
     - llvmlite-artiq
     - scipy
     - numpy
@@ -0,0 +1,2 @@
"%PYTHON%" setup.py install
if errorlevel 1 exit 1
@@ -0,0 +1 @@
$PYTHON setup.py install
@@ -0,0 +1,33 @@
package:
  name: chardet
  version: 2.2.1

source:
  fn: chardet-2.2.1.tar.gz
  url: https://pypi.python.org/packages/source/c/chardet/chardet-2.2.1.tar.gz
  md5: 4a758402eaefd0331bdedc7ecb6f452c

build:
  entry_points:
    - chardetect = chardet.chardetect:main
  number: 0

requirements:
  build:
    - python
    - setuptools

  run:
    - python

test:
  # Python imports
  imports:
    - chardet

  commands:
    - chardetect run_test.py

about:
  home: https://github.com/chardet/chardet
  license: GNU Library or Lesser General Public License (LGPL)
@@ -0,0 +1 @@
%PYTHON% setup.py install
@@ -0,0 +1 @@
$PYTHON setup.py install
@@ -0,0 +1,30 @@
package:
  name: dateutil
  version: 2.4.2

source:
  fn: python-dateutil-2.4.2.tar.gz
  url: https://pypi.python.org/packages/source/p/python-dateutil/python-dateutil-2.4.2.tar.gz
  md5: 4ef68e1c485b09e9f034e10473e5add2

build:
  number: 0

requirements:
  build:
    - python
    - setuptools
    - six >=1.5
  run:
    - python
    - six >=1.5

test:
  imports:
    - dateutil
    - dateutil.zoneinfo

about:
  home: https://dateutil.readthedocs.org
  license: BSD
  summary: 'Extensions to the standard Python datetime module'
@@ -0,0 +1 @@
%PYTHON% setup.py install
@@ -0,0 +1 @@
$PYTHON setup.py install
@@ -0,0 +1,26 @@
package:
  name: pyelftools
  version: 0.23

source:
  git_url: https://github.com/eliben/pyelftools.git
  git_tag: v0.23

build:
  number: 0

requirements:
  build:
    - python
    - setuptools
  run:
    - python

test:
  imports:
    - elftools

about:
  home: https://github.com/eliben/pyelftools.git
  license: Public domain
  summary: 'Library for analyzing ELF files and DWARF debugging information'
@@ -1,10 +1,10 @@
 package:
   name: pyqtgraph
-  version: 0.9.10~a6d5e28
+  version: 0.9.10.1036edf

 source:
   git_url: https://github.com/pyqtgraph/pyqtgraph.git
-  git_rev: a6d5e28
+  git_rev: 1036edf

 requirements:
   build:
@@ -0,0 +1 @@
%PYTHON% setup.py install
@@ -0,0 +1 @@
$PYTHON setup.py install
@@ -0,0 +1,28 @@
package:
  name: sphinx-argparse
  version: 0.1.13

source:
  fn: sphinx-argparse-0.1.13.tar.gz
  url: https://pypi.python.org/packages/source/s/sphinx-argparse/sphinx-argparse-0.1.13.tar.gz
  md5: 5ec84e75e1c4b2ae7ca5fb92a6abd738

build:
  number: 0

requirements:
  build:
    - python
    - setuptools
    - sphinx
  run:
    - python
    - sphinx

test:
  imports:
    - sphinxarg

about:
  license: MIT
  summary: 'Sphinx extension that automatically documents argparse commands and options'
@@ -1,7 +1,13 @@
 Core drivers reference
 ======================

-These drivers are for peripherals closely integrated into the core device, which do not use the controller mechanism.
+These drivers are for the core device and the peripherals closely integrated into it, which do not use the controller mechanism.
+
+:mod:`artiq.coredevice.core` module
+-----------------------------------
+
+.. automodule:: artiq.coredevice.core
+    :members:

 :mod:`artiq.coredevice.ttl` module
 -----------------------------------
@@ -13,14 +13,14 @@ As a very first step, we will turn on a LED on the core device. Create a file ``

 class LED(EnvExperiment):
     def build(self):
-        self.attr_device("core")
-        self.attr_device("led")
+        self.setattr_device("core")
+        self.setattr_device("led")

     @kernel
     def run(self):
         self.led.on()

-The central part of our code is our ``LED`` class, that derives from :class:`artiq.language.environment.EnvExperiment`. Among other features, ``EnvExperiment`` calls our ``build`` method and provides the ``attr_device`` method that interfaces to the device database to create the appropriate device drivers and make those drivers accessible as ``self.core`` and ``self.led``. The ``@kernel`` decorator tells the system that the ``run`` method must be executed on the core device (instead of the host). The decorator uses ``self.core`` internally, which is why we request the core device using ``attr_device`` like any other.
+The central part of our code is our ``LED`` class, which derives from :class:`artiq.language.environment.EnvExperiment`. Among other features, ``EnvExperiment`` calls our ``build`` method and provides the ``setattr_device`` method that interfaces to the device database to create the appropriate device drivers and make those drivers accessible as ``self.core`` and ``self.led``. The ``@kernel`` decorator tells the system that the ``run`` method must be executed on the core device (instead of the host). The decorator uses ``self.core`` internally, which is why we request the core device using ``setattr_device`` like any other.

 Copy the files ``ddb.pyon`` and ``pdb.pyon`` (containing the device and parameter databases) from the ``examples/master`` folder of ARTIQ into the same directory as ``led.py`` (alternatively, you can use the ``-d`` and ``-p`` options of ``artiq_run``). You can open the database files using a text editor - their contents are in a human-readable format. You will probably want to set the IP address of the core device in ``ddb.pyon`` so that the computer can connect to it (it is the ``host`` parameter of the ``comm`` entry). See :ref:`ddb` for more information. The example device database is designed for the NIST QC1 hardware on the KC705; see :ref:`board-ports` for RTIO channel assignments if you need to adapt the device database to a different hardware platform.
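For orientation, the ``comm`` entry being edited typically looks something like the following ``pyon`` fragment. This is an illustration only: the module/class names are our best recollection of the example database of this era, and the address is a placeholder — check against your own ``ddb.pyon``:

    "comm": {
        "type": "local",
        "module": "artiq.coredevice.comm_tcp",
        "class": "Comm",
        "arguments": {"host": "192.168.1.50"}
    }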
@@ -48,8 +48,8 @@ Modify the code as follows: ::

 class LED(EnvExperiment):
     def build(self):
-        self.attr_device("core")
-        self.attr_device("led")
+        self.setattr_device("core")
+        self.setattr_device("led")

     @kernel
     def run(self):
@@ -98,8 +98,8 @@ Create a new file ``rtio.py`` containing the following: ::

 class Tutorial(EnvExperiment):
     def build(self):
-        self.attr_device("core")
-        self.attr_device("ttl0")
+        self.setattr_device("core")
+        self.setattr_device("ttl0")

     @kernel
     def run(self):
@@ -122,8 +122,8 @@ Try reducing the period of the generated waveform until the CPU cannot keep up w

 class Tutorial(EnvExperiment):
     def build(self):
-        self.attr_device("core")
-        self.attr_device("ttl0")
+        self.setattr_device("core")
+        self.setattr_device("ttl0")

     @kernel
     def run(self):
@@ -53,7 +53,7 @@ Experiments may have arguments whose values can be set in the GUI and used in th

     def build(self):
-        self.attr_argument("count", NumberValue(ndecimals=0))
+        self.setattr_argument("count", NumberValue(ndecimals=0))

     def run(self):
         for i in range(int(self.count)):
@@ -13,9 +13,9 @@ Installing using conda
 Installing Anaconda or Miniconda
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

-* You can either install Anaconda (chose Python 3.4) from https://store.continuum.io/cshop/anaconda/
+* You can either install Anaconda (choose Python 3.5) from https://store.continuum.io/cshop/anaconda/

-* Or install the more minimalistic Miniconda (chose Python 3.4) from http://conda.pydata.org/miniconda.html
+* Or install the more minimalistic Miniconda (choose Python 3.5) from http://conda.pydata.org/miniconda.html

 .. warning::
     If you are installing on Windows, choose the Windows 32-bit version regardless of whether you have
@@ -148,7 +148,7 @@ These steps are required to generate bitstream (``.bit``) files, build the MiSoC
 $ python3 setup.py develop --user

 .. note::
-    The options ``develop`` and ``--user`` are for setup.py to install Migen in ``~/.local/lib/python3.4``.
+    The options ``develop`` and ``--user`` are for setup.py to install Migen in ``~/.local/lib/python3.5``.

 .. _install-xc3sprog:
@@ -369,7 +369,7 @@ This command installs all the required packages: ::

 $ sudo apt-get install build-essential autotools-dev file git patch perl xutils-dev python3-pip texinfo flex bison libmpc-dev python3-serial python3-dateutil python3-prettytable python3-setuptools python3-numpy python3-scipy python3-sphinx python3-h5py python3-dev python-dev subversion cmake libusb-dev libftdi-dev pkg-config libffi-dev libgit2-dev

-Note that ARTIQ requires Python 3.4.3 or above.
+Note that ARTIQ requires Python 3.5.0 or above.

 To set user permissions on the JTAG and serial ports of the Pipistrello, create a ``/etc/udev/rules.d/30-usb-papilio.rules`` file containing the following: ::
@@ -22,6 +22,8 @@ A controller manager connects to the master and uses the device database to dete

 Controller managers use the local network address of the connection to the master to filter the device database and run only those controllers that are allocated to the current node. Hostname resolution is supported.

+.. warning:: With some network setups, the current machine's hostname without the domain name resolves to a localhost address (127.0.0.1 or even 127.0.1.1). If you wish to use controllers across a network, make sure that the hostname you provide resolves to an IP address visible on the network (e.g. try providing the full hostname including the domain name).
+
 Command-line client
 -------------------
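To see whether your machine is affected by the situation the new warning describes, here is an illustrative check (not part of ARTIQ):

import socket

# Bare hostname: on affected setups this resolves to 127.0.0.1/127.0.1.1.
print(socket.gethostbyname(socket.gethostname()))
# Fully qualified name: should resolve to an address visible on the network.
print(socket.gethostbyname(socket.getfqdn()))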
@@ -0,0 +1 @@
{"flopping_freq": 1499.9876804260716}
@@ -1 +0,0 @@
{"flopping_freq": 1500.0164816344934}
@@ -3,10 +3,11 @@ from artiq import *

 class SubComponent1(HasEnvironment):
     def build(self):
-        self.attr_argument("sc1_scan", Scannable(default=NoScan(325)),
-                           "Flux capacitor")
-        self.attr_argument("sc1_enum", EnumerationValue(["1", "2", "3"]),
-                           "Flux capacitor")
+        self.setattr_argument("sc1_scan", Scannable(default=NoScan(3250),
+                                                    scale=1e3, unit="Hz"),
+                              "Flux capacitor")
+        self.setattr_argument("sc1_enum", EnumerationValue(["1", "2", "3"]),
+                              "Flux capacitor")

     def do(self):
         print("SC1:")
@@ -17,12 +18,12 @@ class SubComponent1(HasEnvironment):

 class SubComponent2(HasEnvironment):
     def build(self):
-        self.attr_argument("sc2_boolean", BooleanValue(False),
-                           "Transporter")
-        self.attr_argument("sc2_scan", Scannable(default=NoScan(325)),
-                           "Transporter")
-        self.attr_argument("sc2_enum", EnumerationValue(["3", "4", "5"]),
-                           "Transporter")
+        self.setattr_argument("sc2_boolean", BooleanValue(False),
+                              "Transporter")
+        self.setattr_argument("sc2_scan", Scannable(default=NoScan(325)),
+                              "Transporter")
+        self.setattr_argument("sc2_enum", EnumerationValue(["3", "4", "5"]),
+                              "Transporter")

     def do(self):
         print("SC2:")
@@ -34,15 +35,16 @@ class SubComponent2(HasEnvironment):

 class ArgumentsDemo(EnvExperiment):
     def build(self):
-        self.attr_argument("free_value", FreeValue(None))
-        self.attr_argument("number", NumberValue(42, unit="s", step=0.1,
-                           ndecimals=4))
-        self.attr_argument("string", StringValue("Hello World"))
-        self.attr_argument("scan", Scannable(global_max=400,
-                           default=NoScan(325),
-                           ndecimals=6))
-        self.attr_argument("boolean", BooleanValue(True), "Group")
-        self.attr_argument("enum", EnumerationValue(
+        self.setattr_argument("free_value", FreeValue(None))
+        self.setattr_argument("number", NumberValue(42e-6,
+                              unit="s", scale=1e-6,
+                              ndecimals=4))
+        self.setattr_argument("string", StringValue("Hello World"))
+        self.setattr_argument("scan", Scannable(global_max=400,
+                              default=NoScan(325),
+                              ndecimals=6))
+        self.setattr_argument("boolean", BooleanValue(True), "Group")
+        self.setattr_argument("enum", EnumerationValue(
             ["foo", "bar", "quux"], "foo"), "Group")

         self.sc1 = SubComponent1(parent=self)
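The ``NumberValue`` change above pairs an SI-unit default (42e-6 with ``unit="s"``) with ``scale=1e-6``. Our reading — an inference from this hunk, not something the diff states — is that the experiment keeps the SI value while the GUI edits value/scale. A sketch:

# Illustrative only: presumed relationship between a scaled argument's
# stored value and what the GUI displays (inferred from this diff).
stored = 42e-6    # what the experiment code sees, in seconds
scale = 1e-6      # display scale passed to NumberValue(...)
displayed = stored / scale
print(displayed)  # 42.0 -- edited in the GUI in units of `scale` seconds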
@@ -0,0 +1,25 @@
from operator import itemgetter

from artiq import *


class DDSSetter(EnvExperiment):
    """DDS Setter"""
    def build(self):
        self.dds = dict()

        device_db = self.get_device_db()
        for k, v in sorted(device_db.items(), key=itemgetter(0)):
            if (isinstance(v, dict)
                    and v["type"] == "local"
                    and v["module"] == "artiq.coredevice.dds"
                    and v["class"] in {"AD9858", "AD9914"}):
                self.dds[k] = {
                    "driver": self.get_device(k),
                    "frequency": self.get_argument("{}_frequency".format(k),
                                                   NumberValue())
                }

    def run(self):
        for k, v in self.dds.items():
            v["driver"].set(v["frequency"])
@@ -5,15 +5,15 @@ class DDSTest(EnvExperiment):
     """DDS test"""

     def build(self):
-        self.attr_device("core")
-        self.attr_device("dds_bus")
-        self.attr_device("dds0")
-        self.attr_device("dds1")
-        self.attr_device("dds2")
-        self.attr_device("ttl0")
-        self.attr_device("ttl1")
-        self.attr_device("ttl2")
-        self.attr_device("led")
+        self.setattr_device("core")
+        self.setattr_device("dds_bus")
+        self.setattr_device("dds0")
+        self.setattr_device("dds1")
+        self.setattr_device("dds2")
+        self.setattr_device("ttl0")
+        self.setattr_device("ttl1")
+        self.setattr_device("ttl2")
+        self.setattr_device("led")

     @kernel
     def run(self):
@@ -27,21 +27,21 @@ class FloppingF(EnvExperiment):
     """Flopping F simulation"""

     def build(self):
-        self.attr_argument("frequency_scan", Scannable(
-            default=LinearScan(1000, 2000, 100)))
+        self.setattr_argument("frequency_scan", Scannable(
+            default=LinearScan(1000, 2000, 100)))

-        self.attr_argument("F0", NumberValue(1500, min=1000, max=2000))
-        self.attr_argument("noise_amplitude", NumberValue(0.1, min=0, max=100,
-                           step=0.01))
+        self.setattr_argument("F0", NumberValue(1500, min=1000, max=2000))
+        self.setattr_argument("noise_amplitude", NumberValue(0.1, min=0, max=100,
+                              step=0.01))

-        self.attr_device("scheduler")
+        self.setattr_device("scheduler")

     def run(self):
-        frequency = self.set_result("flopping_f_frequency", [],
-                                    realtime=True, store=False)
-        brightness = self.set_result("flopping_f_brightness", [],
-                                     realtime=True)
-        self.set_result("flopping_f_fit", [], realtime=True, store=False)
+        frequency = self.set_dataset("flopping_f_frequency", [],
+                                     broadcast=True, save=False)
+        brightness = self.set_dataset("flopping_f_brightness", [],
+                                      broadcast=True)
+        self.set_dataset("flopping_f_fit", [], broadcast=True, save=False)

         for f in self.frequency_scan:
             m_brightness = model(f, self.F0) + self.noise_amplitude*random.random()
@@ -52,16 +52,16 @@ class FloppingF(EnvExperiment):
             self.scheduler.priority, time.time() + 20, False)

     def analyze(self):
-        # Use get_result so that analyze can be run stand-alone.
-        frequency = self.get_result("flopping_f_frequency")
-        brightness = self.get_result("flopping_f_brightness")
+        # Use get_dataset so that analyze can be run stand-alone.
+        frequency = self.get_dataset("flopping_f_frequency")
+        brightness = self.get_dataset("flopping_f_brightness")
         popt, pcov = curve_fit(model_numpy,
                                frequency, brightness,
-                               p0=[self.get_parameter("flopping_freq")])
+                               p0=[self.get_dataset("flopping_freq")])
         perr = np.sqrt(np.diag(pcov))
         if perr < 0.1:
             F0 = float(popt)
-            self.set_parameter("flopping_freq", F0)
-            self.set_result("flopping_f_fit",
-                            [model(x, F0) for x in frequency],
-                            realtime=True, store=False)
+            self.set_dataset("flopping_freq", F0, persist=True, save=False)
+            self.set_dataset("flopping_f_fit",
+                             [model(x, F0) for x in frequency],
+                             broadcast=True, save=False)
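Read together, the two FloppingF hunks show the old results/parameters API being folded into a single dataset API. The correspondences actually visible in this commit's hunks are summarized below; anything beyond them would be conjecture:

# Mapping observed in this commit:
#   set_result(k, v, realtime=True)  ->  set_dataset(k, v, broadcast=True)
#   set_result(k, v, store=False)    ->  set_dataset(k, v, save=False)
#   get_result(k)                    ->  get_dataset(k)
#   get_parameter(k)                 ->  get_dataset(k)
#   set_parameter(k, v)              ->  set_dataset(k, v, persist=True, save=False)
#   attr_parameter(k, default)      ->  setattr_dataset(k, default)   (see later hunks)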
@@ -3,8 +3,8 @@ from artiq import *

 class Handover(EnvExperiment):
     def build(self):
-        self.attr_device("core")
-        self.attr_device("led")
+        self.setattr_device("core")
+        self.setattr_device("led")

     @kernel
     def blink_once(self):
@@ -7,7 +7,7 @@ class Mandelbrot(EnvExperiment):
     """Mandelbrot set demo"""

     def build(self):
-        self.attr_device("core")
+        self.setattr_device("core")

     def col(self, i):
         sys.stdout.write(" .,-:;i+hHM$*#@ "[i])
@@ -5,20 +5,20 @@ class PhotonHistogram(EnvExperiment):
     """Photon histogram"""

     def build(self):
-        self.attr_device("core")
-        self.attr_device("dds_bus")
-        self.attr_device("bd_dds")
-        self.attr_device("bd_sw")
-        self.attr_device("bdd_dds")
-        self.attr_device("bdd_sw")
-        self.attr_device("pmt")
+        self.setattr_device("core")
+        self.setattr_device("dds_bus")
+        self.setattr_device("bd_dds")
+        self.setattr_device("bd_sw")
+        self.setattr_device("bdd_dds")
+        self.setattr_device("bdd_sw")
+        self.setattr_device("pmt")

-        self.attr_argument("nbins", FreeValue(100))
-        self.attr_argument("repeats", FreeValue(100))
+        self.setattr_argument("nbins", FreeValue(100))
+        self.setattr_argument("repeats", FreeValue(100))

-        self.attr_parameter("cool_f", 230*MHz)
-        self.attr_parameter("detect_f", 220*MHz)
-        self.attr_parameter("detect_t", 100*us)
+        self.setattr_dataset("cool_f", 230*MHz)
+        self.setattr_dataset("detect_f", 220*MHz)
+        self.setattr_dataset("detect_t", 100*us)

     @kernel
     def program_cooling(self):
@@ -60,8 +60,9 @@ class PhotonHistogram(EnvExperiment):
             hist[n] += 1
             total += n

-        self.set_result("cooling_photon_histogram", hist)
-        self.set_parameter("ion_present", total > 5*self.repeats)
+        self.set_dataset("cooling_photon_histogram", hist)
+        self.set_dataset("ion_present", total > 5*self.repeats,
+                         broadcast=True)


 if __name__ == "__main__":
@@ -0,0 +1,18 @@
from itertools import count
from time import sleep

from artiq import *


class RunForever(EnvExperiment):
    def build(self):
        self.setattr_device("scheduler")

    def run(self):
        try:
            for i in count():
                self.scheduler.pause()
                sleep(1)
                print("ping", i)
        except TerminationRequested:
            print("Terminated gracefully")
@@ -13,7 +13,7 @@ class _PayloadNOP(EnvExperiment):

 class _PayloadCoreNOP(EnvExperiment):
     def build(self):
-        self.attr_device("core")
+        self.setattr_device("core")

     @kernel
     def run(self):
@@ -22,7 +22,7 @@ class _PayloadCoreNOP(EnvExperiment):

 class _PayloadCoreSend100Ints(EnvExperiment):
     def build(self):
-        self.attr_device("core")
+        self.setattr_device("core")

     def devnull(self, d):
         pass
@@ -35,7 +35,7 @@ class _PayloadCoreSend100Ints(EnvExperiment):

 class _PayloadCoreSend1MB(EnvExperiment):
     def build(self):
-        self.attr_device("core")
+        self.setattr_device("core")

     def devnull(self, d):
         pass
@@ -48,7 +48,7 @@ class _PayloadCoreSend1MB(EnvExperiment):

 class _PayloadCorePrimes(EnvExperiment):
     def build(self):
-        self.attr_device("core")
+        self.setattr_device("core")

     def devnull(self, d):
         pass
@@ -70,17 +70,17 @@ class _PayloadCorePrimes(EnvExperiment):
 class SpeedBenchmark(EnvExperiment):
     """Speed benchmark"""
     def build(self):
-        self.attr_argument("mode", EnumerationValue(["Single experiment",
-                                                     "With pause",
-                                                     "With scheduler"]))
-        self.attr_argument("payload", EnumerationValue(["NOP",
-                                                        "CoreNOP",
-                                                        "CoreSend100Ints",
-                                                        "CoreSend1MB",
-                                                        "CorePrimes"]))
-        self.attr_argument("nruns", NumberValue(10, min=1, max=1000))
-        self.attr_device("core")
-        self.attr_device("scheduler")
+        self.setattr_argument("mode", EnumerationValue(["Single experiment",
+                                                        "With pause",
+                                                        "With scheduler"]))
+        self.setattr_argument("payload", EnumerationValue(["NOP",
+                                                           "CoreNOP",
+                                                           "CoreSend100Ints",
+                                                           "CoreSend1MB",
+                                                           "CorePrimes"]))
+        self.setattr_argument("nruns", NumberValue(10, min=1, max=1000, ndecimals=0))
+        self.setattr_device("core")
+        self.setattr_device("scheduler")

     def run_with_scheduler(self):
         nruns = int(self.nruns)
@@ -111,9 +111,9 @@ class SpeedBenchmark(EnvExperiment):
             self.scheduler.pause()
         end_time = time.monotonic()

-        self.set_result("benchmark_run_time",
-                        (end_time-start_time)/self.nruns,
-                        realtime=True)
+        self.set_dataset("benchmark_run_time",
+                         (end_time-start_time)/self.nruns,
+                         broadcast=True)

     def run(self):
         if self.mode == "Single experiment":
@@ -128,11 +128,11 @@ class SpeedBenchmark(EnvExperiment):

 class _Report(EnvExperiment):
     def build(self):
-        self.attr_argument("start_time")
-        self.attr_argument("nruns")
+        self.setattr_argument("start_time")
+        self.setattr_argument("nruns")

     def run(self):
         end_time = time.monotonic()
-        self.set_result("benchmark_run_time",
-                        (end_time-self.start_time)/self.nruns,
-                        realtime=True)
+        self.set_dataset("benchmark_run_time",
+                         (end_time-self.start_time)/self.nruns,
+                         broadcast=True)
@@ -31,9 +31,9 @@ class TDR(EnvExperiment):
     This is also equivalent to a loopback tester or a delay measurement.
     """
     def build(self):
-        self.attr_device("core")
-        self.attr_device("pmt0")
-        self.attr_device("ttl2")
+        self.setattr_device("core")
+        self.setattr_device("pmt0")
+        self.setattr_device("ttl2")

     def run(self):
         n = 1000  # repetitions
@@ -16,16 +16,16 @@ class Transport(EnvExperiment):
     """Transport"""

     def build(self):
-        self.attr_device("core")
-        self.attr_device("bd")
-        self.attr_device("bdd")
-        self.attr_device("pmt")
-        self.attr_device("electrodes")
+        self.setattr_device("core")
+        self.setattr_device("bd")
+        self.setattr_device("bdd")
+        self.setattr_device("pmt")
+        self.setattr_device("electrodes")

-        self.attr_argument("wait_at_stop", FreeValue(100*us))
-        self.attr_argument("speed", FreeValue(1.5))
-        self.attr_argument("repeats", FreeValue(100))
-        self.attr_argument("nbins", FreeValue(100))
+        self.setattr_argument("wait_at_stop", FreeValue(100*us))
+        self.setattr_argument("speed", FreeValue(1.5))
+        self.setattr_argument("repeats", FreeValue(100))
+        self.setattr_argument("nbins", FreeValue(100))

     def calc_waveforms(self, stop):
         t = transport_data["t"][:stop]*self.speed
@@ -5,16 +5,16 @@ class AluminumSpectroscopy(EnvExperiment):
     """Aluminum spectroscopy (simulation)"""

     def build(self):
-        self.attr_device("core")
-        self.attr_device("mains_sync")
-        self.attr_device("laser_cooling")
-        self.attr_device("spectroscopy")
-        self.attr_device("spectroscopy_b")
-        self.attr_device("state_detection")
-        self.attr_device("pmt")
-        self.attr_parameter("spectroscopy_freq", 432*MHz)
-        self.attr_argument("photon_limit_low", FreeValue(10))
-        self.attr_argument("photon_limit_high", FreeValue(15))
+        self.setattr_device("core")
+        self.setattr_device("mains_sync")
+        self.setattr_device("laser_cooling")
+        self.setattr_device("spectroscopy")
+        self.setattr_device("spectroscopy_b")
+        self.setattr_device("state_detection")
+        self.setattr_device("pmt")
+        self.setattr_dataset("spectroscopy_freq", 432*MHz)
+        self.setattr_argument("photon_limit_low", FreeValue(10))
+        self.setattr_argument("photon_limit_high", FreeValue(15))

     @kernel
     def run(self):
@@ -5,9 +5,9 @@ class SimpleSimulation(EnvExperiment):
     """Simple simulation"""

     def build(self):
-        self.attr_device("core")
+        self.setattr_device("core")
         for wo in "abcd":
-            self.attr_device(wo)
+            self.setattr_device(wo)

     @kernel
     def run(self):
setup.py
@@ -5,8 +5,8 @@ import sys
 import os


-if sys.version_info[:3] < (3, 4, 3):
-    raise Exception("You need at least Python 3.4.3 to run ARTIQ")
+if sys.version_info[:3] < (3, 5, 0):
+    raise Exception("You need Python 3.5.0+")


 class PushDocCommand(Command):