forked from M-Labs/artiq
commit c581af29d7
Merge remote-tracking branch 'origin/master' into new-py2llvm

.travis.yml (24 changed lines):
@@ -7,37 +7,37 @@ branches:
 sudo: false
 env:
   global:
-    - MSCDIR=$TRAVIS_BUILD_DIR/misoc
-    - PATH=$HOME/miniconda/bin:/usr/local/llvm-or1k/bin:$PATH
     - BUILD_SOC=1
     - secure: "DUk/Ihg8KbbzEgPF0qrHqlxU8e8eET9i/BtzNvFddIGX4HP/P2qz0nk3cVkmjuWhqJXSbC22RdKME9qqPzw6fJwJ6dpJ3OR6dDmSd7rewavq+niwxu52PVa+yK8mL4yf1terM7QQ5tIRf+yUL9qGKrZ2xyvEuRit6d4cFep43Ws="
 before_install:
   - mkdir -p $HOME/.mlabs
   - if [ $TRAVIS_PULL_REQUEST != false ]; then BUILD_SOC=0; fi
-  - . ./.travis/get-toolchain.sh
   - if [ $BUILD_SOC -ne 0 ]; then ./.travis/get-xilinx.sh; fi
-  - ./.travis/get-anaconda.sh
+  - . ./.travis/get-toolchain.sh
+  - . ./.travis/get-anaconda.sh
   - source $HOME/miniconda/bin/activate py34
-  - conda install pip coverage binstar migen cython
+  - conda install -q pip coverage binstar migen cython
   - pip install coveralls
 install:
   - conda build conda/artiq
-  - conda install $HOME/miniconda/conda-bld/linux-64/artiq-*.tar.bz2
+  - conda install -q artiq --use-local
 script:
   - coverage run --source=artiq setup.py test
   - make -C doc/manual html
 after_success:
-  - binstar login --hostname $(hostname) --username $binstar_login --password $binstar_password
-  - binstar upload --user $binstar_login --channel dev --force $HOME/miniconda/conda-bld/linux-64/artiq-*.tar.bz2
+  - binstar -q login --hostname $(hostname) --username $binstar_login --password $binstar_password
+  - binstar -q upload --user $binstar_login --channel dev --force $HOME/miniconda/conda-bld/linux-64/artiq-*.tar.bz2
+  - binstar -q logout
   - coveralls
 notifications:
-  email: false
+  email:
+    recipients:
+      - rjordens@nist.gov
+    on_success: always
+    on_failure: never
   irc:
     channels:
       - chat.freenode.net#m-labs
     template:
       - "%{repository}#%{build_number} (%{branch} - %{commit} : %{author}): %{message}"
       - "Build details : %{build_url}"
-  webhooks:
-    urls:
-      - https://webhooks.gitter.im/e/d26782523952bfa53814
@@ -1,6 +1,7 @@
 #!/bin/sh
 
-wget http://repo.continuum.io/miniconda/Miniconda3-3.7.3-Linux-x86_64.sh -O miniconda.sh
+export PATH=$HOME/miniconda/bin:$PATH
+wget http://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh
 bash miniconda.sh -b -p $HOME/miniconda
 hash -r
 conda config --set always_yes yes --set changeps1 no
@@ -9,4 +10,4 @@ conda info -a
 conda install conda-build jinja2
 conda create -q -n py34 python=$TRAVIS_PYTHON_VERSION
 conda config --add channels fallen
-conda config --add channels https://conda.binstar.org/fallen/channel/dev
+conda config --add channels https://conda.anaconda.org/fallen/channel/dev
@@ -24,12 +24,13 @@ echo "$secret" | gpg --passphrase-fd 0 Xilinx.lic.gpg
 mkdir -p ~/.Xilinx
 mv Xilinx.lic ~/.Xilinx/Xilinx.lic
 
-# Tell mibuild where Vivado is installed
-echo "MISOC_EXTRA_VIVADO_CMDLINE=\"-Ob vivado_path $HOME/Xilinx/Vivado\"" >> $HOME/.mlabs/build_settings.sh
-echo "MISOC_EXTRA_ISE_CMDLINE=\"-Ob ise_path $HOME/opt/Xilinx/\"" >> $HOME/.mlabs/build_settings.sh
-
-# Lie to Vivado by hooking the ioctl used to retrieve mac address for license verification
 git clone https://github.com/fallen/impersonate_macaddress
 make -C impersonate_macaddress
-echo "export MACADDR=$macaddress" >> $HOME/.mlabs/build_settings.sh
-echo "export LD_PRELOAD=$PWD/impersonate_macaddress/impersonate_macaddress.so" >> $HOME/.mlabs/build_settings.sh
+# Tell mibuild where Xilinx toolchains are installed
+# and feed it the mac address corresponding to the license
+cat > $HOME/.mlabs/build_settings.sh << EOF
+MISOC_EXTRA_VIVADO_CMDLINE="-Ob vivado_path $HOME/Xilinx/Vivado"
+MISOC_EXTRA_ISE_CMDLINE="-Ob ise_path $HOME/opt/Xilinx/"
+export MACADDR=$macaddress
+export LD_PRELOAD=$PWD/impersonate_macaddress/impersonate_macaddress.so
+EOF
@@ -1,6 +1,5 @@
-from artiq.language.core import *
-from artiq.language.experiment import Experiment
-from artiq.language.db import *
-from artiq.language.units import check_unit
-from artiq.language.units import ps, ns, us, ms, s
-from artiq.language.units import Hz, kHz, MHz, GHz
+from artiq import language
+from artiq.language import *
+
+__all__ = []
+__all__.extend(language.__all__)
@@ -1,21 +1,9 @@
 from operator import itemgetter
 
-from artiq.language.db import AutoDB
-from artiq.language.units import ms
-from artiq.coredevice.runtime import LinkInterface
-
-
-class _RuntimeEnvironment(LinkInterface):
-    def __init__(self):
-        self.warmup_time = 1*ms
-
-    def emit_object(self):
-        return str(self.llvm_module)
 
-
-class Comm(AutoDB):
-    def get_runtime_env(self):
-        return _RuntimeEnvironment()
+class Comm:
+    def __init__(self, dmgr):
+        pass
 
     def switch_clock(self, external):
         pass
@@ -21,6 +21,11 @@ class _H2DMsgType(Enum):
 
     RPC_REPLY = 6
 
+    FLASH_READ_REQUEST = 7
+    FLASH_WRITE_REQUEST = 8
+    FLASH_ERASE_REQUEST = 9
+    FLASH_REMOVE_REQUEST = 10
+
 
 class _D2HMsgType(Enum):
     LOG_REPLY = 1
@@ -37,6 +42,10 @@ class _D2HMsgType(Enum):
 
     RPC_REQUEST = 10
 
+    FLASH_READ_REPLY = 11
+    FLASH_OK_REPLY = 12
+    FLASH_ERROR_REPLY = 13
+
 
 class UnsupportedDevice(Exception):
     pass
@@ -86,7 +95,12 @@ class CommGeneric:
     def _write_header(self, length, ty):
         self.open()
         logger.debug("sending message: type=%r length=%d", ty, length)
-        self.write(struct.pack(">llB", 0x5a5a5a5a, length, ty.value))
+        self.write(struct.pack(">ll", 0x5a5a5a5a, length))
+        if ty is not None:
+            self.write(struct.pack("B", ty.value))
+
+    def reset_session(self):
+        self._write_header(0, None)
 
     def check_ident(self):
         self._write_header(9, _H2DMsgType.IDENT_REQUEST)
@@ -116,12 +130,46 @@ class CommGeneric:
         if ty != _D2HMsgType.LOAD_COMPLETED:
             raise IOError("Incorrect reply from device: "+str(ty))
 
-    def run(self, kname, reset_now):
-        self._write_header(len(kname) + 10, _H2DMsgType.RUN_KERNEL)
-        self.write(struct.pack("B", reset_now))
+    def run(self, kname):
+        self._write_header(len(kname) + 9, _H2DMsgType.RUN_KERNEL)
         self.write(bytes(kname, "ascii"))
         logger.debug("running kernel: %s", kname)
 
+    def flash_storage_read(self, key):
+        self._write_header(9+len(key), _H2DMsgType.FLASH_READ_REQUEST)
+        self.write(key)
+        length, ty = self._read_header()
+        if ty != _D2HMsgType.FLASH_READ_REPLY:
+            raise IOError("Incorrect reply from device: {}".format(ty))
+        value = self.read(length - 9)
+        return value
+
+    def flash_storage_write(self, key, value):
+        self._write_header(9+len(key)+1+len(value),
+                           _H2DMsgType.FLASH_WRITE_REQUEST)
+        self.write(key)
+        self.write(b"\x00")
+        self.write(value)
+        _, ty = self._read_header()
+        if ty != _D2HMsgType.FLASH_OK_REPLY:
+            if ty == _D2HMsgType.FLASH_ERROR_REPLY:
+                raise IOError("Flash storage is full")
+            else:
+                raise IOError("Incorrect reply from device: {}".format(ty))
+
+    def flash_storage_erase(self):
+        self._write_header(9, _H2DMsgType.FLASH_ERASE_REQUEST)
+        _, ty = self._read_header()
+        if ty != _D2HMsgType.FLASH_OK_REPLY:
+            raise IOError("Incorrect reply from device: {}".format(ty))
+
+    def flash_storage_remove(self, key):
+        self._write_header(9+len(key), _H2DMsgType.FLASH_REMOVE_REQUEST)
+        self.write(key)
+        _, ty = self._read_header()
+        if ty != _D2HMsgType.FLASH_OK_REPLY:
+            raise IOError("Incorrect reply from device: {}".format(ty))
+
     def _receive_rpc_value(self, type_tag):
         if type_tag == "n":
             return None
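For orientation only (not part of the commit): the new FLASH_* message types give the host direct access to the core device's flash key/value storage through any Comm implementation. A minimal sketch, assuming an already-constructed comm object; the key name is an arbitrary example, and keys/values are raw bytes, matching the write(key)/write(value) calls above:

    def store_and_verify(comm, value):
        # hypothetical helper; b"idle_kernel" is just an example key
        comm.flash_storage_write(b"idle_kernel", value)
        assert comm.flash_storage_read(b"idle_kernel") == value
        comm.flash_storage_remove(b"idle_kernel")
        # comm.flash_storage_erase() would wipe the whole storage area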
@@ -3,22 +3,22 @@ import serial
 import struct
 
 from artiq.coredevice.comm_generic import CommGeneric
-from artiq.language.db import *
 
 
 logger = logging.getLogger(__name__)
 
 
-class Comm(CommGeneric, AutoDB):
-    class DBKeys:
-        serial_dev = Argument()
-        baud_rate = Argument(115200)
+class Comm(CommGeneric):
+    def __init__(self, dmgr, serial_dev, baud_rate=115200):
+        self.serial_dev = serial_dev
+        self.baud_rate = baud_rate
 
     def open(self):
         if hasattr(self, "port"):
             return
         self.port = serial.serial_for_url(self.serial_dev,
                                           baudrate=self.baud_rate)
+        self.reset_session()
 
     def close(self):
         if not hasattr(self, "port"):
@@ -2,16 +2,15 @@ import logging
 import socket
 
 from artiq.coredevice.comm_generic import CommGeneric
-from artiq.language.db import *
 
 
 logger = logging.getLogger(__name__)
 
 
-class Comm(CommGeneric, AutoDB):
-    class DBKeys:
-        host = Argument()
-        port = Argument(1381)
+class Comm(CommGeneric):
+    def __init__(self, dmgr, host, port=1381):
+        self.host = host
+        self.port = port
 
     def open(self):
         if hasattr(self, "socket"):
@@ -1,11 +1,9 @@
 import os
 
 from artiq.language.core import *
-from artiq.language.db import *
 from artiq.language.units import ns
 
 from artiq.transforms.inline import inline
-from artiq.transforms.lower_units import lower_units
 from artiq.transforms.quantize_time import quantize_time
 from artiq.transforms.remove_inter_assigns import remove_inter_assigns
 from artiq.transforms.fold_constants import fold_constants
@@ -15,7 +13,7 @@ from artiq.transforms.interleave import interleave
 from artiq.transforms.lower_time import lower_time
 from artiq.transforms.unparse import unparse
 
-from artiq.coredevice.runtime import Environment
+from artiq.coredevice.runtime import Runtime
 
 from artiq.py2llvm import get_runtime_binary
 
@@ -47,27 +45,23 @@ def _no_debug_unparse(label, node):
     pass
 
 
-class Core(AutoDB):
-    class DBKeys:
-        comm = Device()
-        ref_period = Argument(8*ns)
-        external_clock = Argument(False)
-
-    def build(self):
+class Core:
+    def __init__(self, dmgr, ref_period=8*ns, external_clock=False):
+        self.comm = dmgr.get("comm")
+        self.ref_period = ref_period
+        self.external_clock = external_clock
+
         self.first_run = True
         self.core = self
         self.comm.core = self
-        self.runtime_env = Environment()
+        self.runtime = Runtime()
 
     def transform_stack(self, func_def, rpc_map, exception_map,
                         debug_unparse=_no_debug_unparse):
-        lower_units(func_def, rpc_map)
-        debug_unparse("lower_units", func_def)
-
         remove_inter_assigns(func_def)
         debug_unparse("remove_inter_assigns_1", func_def)
 
-        quantize_time(func_def, self.ref_period.amount)
+        quantize_time(func_def, self.ref_period)
         debug_unparse("quantize_time", func_def)
 
         fold_constants(func_def)
@@ -108,7 +102,7 @@ class Core(AutoDB):
         debug_unparse("inline", func_def)
         self.transform_stack(func_def, rpc_map, exception_map, debug_unparse)
 
-        binary = get_runtime_binary(self.runtime_env, func_def)
+        binary = get_runtime_binary(self.runtime, func_def)
 
         return binary, rpc_map, exception_map
 
@@ -120,15 +114,14 @@ class Core(AutoDB):
         binary, rpc_map, exception_map = self.compile(
             k_function, k_args, k_kwargs)
         self.comm.load(binary)
-        self.comm.run(k_function.__name__, self.first_run)
+        self.comm.run(k_function.__name__)
         self.comm.serve(rpc_map, exception_map)
         self.first_run = False
 
     @kernel
-    def get_rtio_time(self):
-        return cycles_to_time(syscall("rtio_get_counter"))
+    def get_rtio_counter_mu(self):
+        return syscall("rtio_get_counter")
 
     @kernel
     def break_realtime(self):
-        t = syscall("rtio_get_counter") + 125000
-        at(cycles_to_time(t))
+        at_mu(syscall("rtio_get_counter") + 125000)
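A side note on the machine-unit ("mu") naming introduced here and used by the drivers below: timestamps now count RTIO clock cycles of length ref_period instead of seconds. The snippet below is illustrative arithmetic only, assuming the default ref_period of 8 ns shown above; it is not code from the commit:

    def seconds_to_mu_approx(t, ref_period=8e-9):
        # one machine unit = one RTIO clock cycle of length ref_period
        return round(t/ref_period)

    seconds_to_mu_approx(1e-6)   # 1 us -> 125 machine units at 8 ns
    # hence break_realtime() above jumps 125000 mu (about 1 ms) ahead of
    # the current RTIO counter value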
@@ -1,9 +1,8 @@
 from artiq.language.core import *
-from artiq.language.db import *
 from artiq.language.units import *
 
 
-PHASE_MODE_DEFAULT = -1
+_PHASE_MODE_DEFAULT = -1
 # keep in sync with dds.h
 PHASE_MODE_CONTINUOUS = 0
 PHASE_MODE_ABSOLUTE = 1
@@ -23,21 +22,19 @@ class _BatchContextManager:
         self.dds_bus.batch_exit()
 
 
-class DDSBus(AutoDB):
+class DDSBus:
     """Core device Direct Digital Synthesis (DDS) bus batching driver.
 
     Manages batching of DDS commands on a DDS shared bus."""
-    class DBKeys:
-        core = Device()
-
-    def build(self):
+    def __init__(self, dmgr):
+        self.core = dmgr.get("core")
         self.batch = _BatchContextManager(self)
 
     @kernel
     def batch_enter(self):
         """Starts a DDS command batch. All DDS commands are buffered
         after this call, until ``batch_exit`` is called."""
-        syscall("dds_batch_enter", time_to_cycles(now()))
+        syscall("dds_batch_enter", now_mu())
 
     @kernel
     def batch_exit(self):
@@ -46,21 +43,21 @@ class DDSBus(AutoDB):
         syscall("dds_batch_exit")
 
 
-class DDS(AutoDB):
+class _DDSGeneric:
     """Core device Direct Digital Synthesis (DDS) driver.
 
     Controls one DDS channel managed directly by the core device's runtime.
 
-    :param dds_sysclk: DDS system frequency, used for computing the frequency
-        tuning words.
+    This class should not be used directly, instead, use the chip-specific
+    drivers such as ``AD9858`` and ``AD9914``.
+
+    :param sysclk: DDS system frequency.
     :param channel: channel number of the DDS device to control.
     """
-    class DBKeys:
-        core = Device()
-        dds_sysclk = Argument(1*GHz)
-        channel = Argument()
-
-    def build(self):
+    def __init__(self, dmgr, sysclk, channel):
+        self.core = dmgr.get("core")
+        self.sysclk = sysclk
+        self.channel = channel
         self.phase_mode = PHASE_MODE_CONTINUOUS
 
     @portable
@@ -68,21 +65,33 @@ class DDS(AutoDB):
         """Returns the frequency tuning word corresponding to the given
         frequency.
         """
-        return round(2**32*frequency/self.dds_sysclk)
+        return round(2**32*frequency/self.sysclk)
 
     @portable
     def ftw_to_frequency(self, ftw):
         """Returns the frequency corresponding to the given frequency tuning
         word.
         """
-        return ftw*self.dds_sysclk/2**32
+        return ftw*self.sysclk/2**32
+
+    @portable
+    def turns_to_pow(self, turns):
+        """Returns the phase offset word corresponding to the given phase
+        in turns."""
+        return round(turns*2**self.pow_width)
+
+    @portable
+    def pow_to_turns(self, pow):
+        """Returns the phase in turns corresponding to the given phase offset
+        word."""
+        return pow/2**self.pow_width
 
     @kernel
     def init(self):
         """Resets and initializes the DDS channel.
 
         The runtime does this for all channels upon core device startup."""
-        syscall("dds_init", time_to_cycles(now()), self.channel)
+        syscall("dds_init", now_mu(), self.channel)
 
     @kernel
     def set_phase_mode(self, phase_mode):
@@ -105,17 +114,37 @@ class DDS(AutoDB):
         self.phase_mode = phase_mode
 
     @kernel
-    def set(self, frequency, phase_mode=PHASE_MODE_DEFAULT, phase_offset=0):
+    def set_mu(self, frequency, phase=0, phase_mode=_PHASE_MODE_DEFAULT):
         """Sets the DDS channel to the specified frequency and phase.
 
+        This uses machine units (FTW and POW). The frequency tuning word width
+        is 32, whereas the phase offset word width depends on the type of DDS
+        chip and can be retrieved via the ``pow_width`` attribute.
+
         :param frequency: frequency to generate.
+        :param phase: adds an offset, in turns, to the phase.
         :param phase_mode: if specified, overrides the default phase mode set
             by ``set_phase_mode`` for this call.
-        :param phase_offset: adds an offset, in turns, to the phase.
         """
-        if phase_mode == PHASE_MODE_DEFAULT:
+        if phase_mode == _PHASE_MODE_DEFAULT:
             phase_mode = self.phase_mode
-        syscall("dds_set", time_to_cycles(now()), self.channel,
-                self.frequency_to_ftw(frequency), round(phase_offset*2**14),
-                self.phase_mode)
+        syscall("dds_set", now_mu(), self.channel,
+                frequency, round(phase*2**self.pow_width), phase_mode)
+
+    @kernel
+    def set(self, frequency, phase=0, phase_mode=_PHASE_MODE_DEFAULT):
+        """Like ``set_mu``, but uses Hz and turns."""
+        self.set_mu(self.frequency_to_ftw(frequency),
+                    self.turns_to_pow(phase), phase_mode)
+
+
+class AD9858(_DDSGeneric):
+    """Driver for AD9858 DDS chips. See ``_DDSGeneric`` for a description
+    of the functionality."""
+    pow_width = 14
+
+
+class AD9914(_DDSGeneric):
+    """Driver for AD9914 DDS chips. See ``_DDSGeneric`` for a description
+    of the functionality."""
+    pow_width = 16
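To make the new machine-unit helpers concrete, here is a small worked example (plain Python, not from the commit; the 3 GHz system clock is an assumed value, pow_width = 16 is the AD9914 value defined above):

    sysclk = 3e9       # assumed DDS system clock, in Hz
    pow_width = 16     # AD9914.pow_width above

    round(2**32*100e6/sysclk)   # frequency_to_ftw(100 MHz) -> 143165577
    round(0.25*2**pow_width)    # turns_to_pow(0.25 turns)  -> 16384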
@@ -1,7 +1,7 @@
 import os
 
-import llvmlite.ir as ll
-import llvmlite.binding as llvm
+import llvmlite_or1k.ir as ll
+import llvmlite_or1k.binding as llvm
 
 from artiq.py2llvm import base_types, fractions, lists
 from artiq.language import units
@@ -21,6 +21,7 @@ _syscalls = {
     "ttl_set_oe": "Iib:n",
     "ttl_set_sensitivity": "Iii:n",
     "ttl_get": "iI:I",
+    "ttl_clock_set": "Iii:n",
     "dds_init": "Ii:n",
     "dds_batch_enter": "I:n",
     "dds_batch_exit": "n:n",
@@ -195,7 +196,7 @@ def _debug_dump_obj(obj):
     raise IOError
 
 
-class Environment(LinkInterface):
+class Runtime(LinkInterface):
     def __init__(self):
         self.cpu_type = "or1k"
         # allow 1ms for all initial DDS programming
@@ -208,4 +209,4 @@ class Environment(LinkInterface):
         return obj
 
     def __repr__(self):
-        return "<Environment {}>".format(self.cpu_type)
+        return "<Runtime {}>".format(self.cpu_type)
@@ -1,49 +1,7 @@
 from artiq.language.core import *
-from artiq.language.db import *
 
 
-class LLTTLOut(AutoDB):
-    """Low-level RTIO TTL output driver.
-
-    Allows setting RTIO TTL outputs at arbitrary times, without time
-    unit conversion.
-
-    This is meant to be used mostly in drivers; consider using
-    ``TTLOut`` instead.
-
-    This should be used with output-only channels.
-    """
-    class DBKeys:
-        core = Device()
-        channel = Argument()
-
-    @kernel
-    def set_o(self, t, value):
-        """Sets the output value of the RTIO channel.
-
-        :param t: timestamp in RTIO cycles (64-bit integer).
-        :param value: value to set at the output.
-        """
-        syscall("ttl_set_o", t, self.channel, value)
-
-    @kernel
-    def on(self, t):
-        """Turns the RTIO channel on.
-
-        :param t: timestamp in RTIO cycles (64-bit integer).
-        """
-        self.set_o(t, True)
-
-    @kernel
-    def off(self, t):
-        """Turns the RTIO channel off.
-
-        :param t: timestamp in RTIO cycles (64-bit integer).
-        """
-        self.set_o(t, False)
-
-
-class TTLOut(AutoDB):
+class TTLOut:
     """RTIO TTL output driver.
 
     This should be used with output-only channels.
@@ -51,45 +9,53 @@ class TTLOut(AutoDB):
     :param core: core device
     :param channel: channel number
     """
-    class DBKeys:
-        core = Device()
-        channel = Argument()
-
-    def build(self):
+    def __init__(self, dmgr, channel):
+        self.core = dmgr.get("core")
+        self.channel = channel
+
         # in RTIO cycles
         self.o_previous_timestamp = int64(0)
 
     @kernel
-    def _set_o(self, o):
-        syscall("ttl_set_o", time_to_cycles(now()), self.channel, o)
-        self.o_previous_timestamp = time_to_cycles(now())
+    def set_o(self, o):
+        syscall("ttl_set_o", now_mu(), self.channel, o)
+        self.o_previous_timestamp = now_mu()
 
     @kernel
     def sync(self):
-        """Busy-waits until all programmed level switches have been effected."""
+        """Busy-wait until all programmed level switches have been
+        effected."""
         while syscall("rtio_get_counter") < self.o_previous_timestamp:
             pass
 
     @kernel
     def on(self):
         """Sets the output to a logic high state."""
-        self._set_o(True)
+        self.set_o(True)
 
     @kernel
     def off(self):
-        """Sets the output to a logic low state."""
-        self._set_o(False)
+        """Set the output to a logic low state."""
+        self.set_o(False)
+
+    @kernel
+    def pulse_mu(self, duration):
+        """Pulse the output high for the specified duration
+        (in machine units)."""
+        self.on()
+        delay_mu(duration)
+        self.off()
 
     @kernel
     def pulse(self, duration):
-        """Pulses the output high for the specified duration."""
+        """Pulse the output high for the specified duration
+        (in seconds)."""
        self.on()
        delay(duration)
        self.off()
 
 
-class TTLInOut(AutoDB):
+class TTLInOut:
     """RTIO TTL input/output driver.
 
     In output mode, provides functions to set the logic level on the signal.
@@ -107,78 +73,113 @@ class TTLInOut(AutoDB):
     :param core: core device
     :param channel: channel number
     """
-    class DBKeys:
-        core = Device()
-        channel = Argument()
-
-    def build(self):
+    def __init__(self, dmgr, channel):
+        self.core = dmgr.get("core")
+        self.channel = channel
+
         # in RTIO cycles
         self.o_previous_timestamp = int64(0)
         self.i_previous_timestamp = int64(0)
 
     @kernel
-    def _set_oe(self, oe):
-        syscall("ttl_set_oe", time_to_cycles(now()), self.channel, oe)
+    def set_oe(self, oe):
+        syscall("ttl_set_oe", now_mu(), self.channel, oe)
 
     @kernel
     def output(self):
-        self._set_oe(True)
+        self.set_oe(True)
 
     @kernel
     def input(self):
-        self._set_oe(False)
+        self.set_oe(False)
 
     @kernel
-    def _set_o(self, o):
-        syscall("ttl_set_o", time_to_cycles(now()), self.channel, o)
-        self.o_previous_timestamp = time_to_cycles(now())
+    def set_o(self, o):
+        syscall("ttl_set_o", now_mu(), self.channel, o)
+        self.o_previous_timestamp = now_mu()
 
     @kernel
     def sync(self):
-        """Busy-waits until all programmed level switches have been effected."""
+        """Busy-wait until all programmed level switches have been
+        effected."""
         while syscall("rtio_get_counter") < self.o_previous_timestamp:
             pass
 
     @kernel
     def on(self):
-        """Sets the output to a logic high state."""
-        self._set_o(True)
+        """Set the output to a logic high state."""
+        self.set_o(True)
 
     @kernel
     def off(self):
-        """Sets the output to a logic low state."""
-        self._set_o(False)
+        """Set the output to a logic low state."""
+        self.set_o(False)
+
+    @kernel
+    def pulse_mu(self, duration):
+        """Pulses the output high for the specified duration
+        (in machine units)."""
+        self.on()
+        delay_mu(duration)
+        self.off()
 
     @kernel
     def pulse(self, duration):
-        """Pulses the output high for the specified duration."""
+        """Pulses the output high for the specified duration
+        (in seconds)."""
         self.on()
         delay(duration)
         self.off()
 
     @kernel
     def _set_sensitivity(self, value):
-        syscall("ttl_set_sensitivity", time_to_cycles(now()), self.channel, value)
-        self.i_previous_timestamp = time_to_cycles(now())
+        syscall("ttl_set_sensitivity", now_mu(), self.channel, value)
+        self.i_previous_timestamp = now_mu()
+
+    @kernel
+    def gate_rising_mu(self, duration):
+        """Register rising edge events for the specified duration
+        (in machine units)."""
+        self._set_sensitivity(1)
+        delay_mu(duration)
+        self._set_sensitivity(0)
+
+    @kernel
+    def gate_falling_mu(self, duration):
+        """Register falling edge events for the specified duration
+        (in machine units)."""
+        self._set_sensitivity(2)
+        delay_mu(duration)
+        self._set_sensitivity(0)
+
+    @kernel
+    def gate_both_mu(self, duration):
+        """Register both rising and falling edge events for the specified
+        duration (in machine units)."""
+        self._set_sensitivity(3)
+        delay_mu(duration)
+        self._set_sensitivity(0)
 
     @kernel
     def gate_rising(self, duration):
-        """Register rising edge events for the specified duration."""
+        """Register rising edge events for the specified duration
+        (in seconds)."""
         self._set_sensitivity(1)
         delay(duration)
         self._set_sensitivity(0)
 
     @kernel
     def gate_falling(self, duration):
-        """Register falling edge events for the specified duration."""
+        """Register falling edge events for the specified duration
+        (in seconds)."""
         self._set_sensitivity(2)
         delay(duration)
         self._set_sensitivity(0)
 
     @kernel
-    def gate_both(self, duration):
+    def gate_both_mu(self, duration):
         """Register both rising and falling edge events for the specified
-        duration."""
+        duration (in seconds)."""
         self._set_sensitivity(3)
         delay(duration)
         self._set_sensitivity(0)
@@ -194,11 +195,81 @@ class TTLInOut(AutoDB):
         return count
 
     @kernel
-    def timestamp(self):
+    def timestamp_mu(self):
         """Poll the RTIO input and returns an event timestamp, according to
         the gating.
 
         If the gate is permanently closed, returns a negative value.
         """
-        return cycles_to_time(syscall("ttl_get", self.channel,
-                                      self.i_previous_timestamp))
+        return syscall("ttl_get", self.channel, self.i_previous_timestamp)
+
+
+class TTLClockGen:
+    """RTIO TTL clock generator driver.
+
+    This should be used with TTL channels that have a clock generator
+    built into the gateware (not compatible with regular TTL channels).
+
+    :param core: core device
+    :param channel: channel number
+    """
+    def __init__(self, dmgr, channel):
+        self.core = dmgr.get("core")
+        self.channel = channel
+
+    def build(self):
+        # in RTIO cycles
+        self.previous_timestamp = int64(0)
+        self.acc_width = 24
+
+    @portable
+    def frequency_to_ftw(self, frequency):
+        """Returns the frequency tuning word corresponding to the given
+        frequency.
+        """
+        return round(2**self.acc_width*frequency*self.core.ref_period)
+
+    @portable
+    def ftw_to_frequency(self, ftw):
+        """Returns the frequency corresponding to the given frequency tuning
+        word.
+        """
+        return ftw/self.core.ref_period/2**self.acc_width
+
+    @kernel
+    def set_mu(self, frequency):
+        """Set the frequency of the clock, in machine units.
+
+        This also sets the phase, as the time of the first generated rising
+        edge corresponds to the time of the call.
+
+        The clock generator contains a 24-bit phase accumulator operating on
+        the RTIO clock. At each RTIO clock tick, the frequency tuning word is
+        added to the phase accumulator. The most significant bit of the phase
+        accumulator is connected to the TTL line. Setting the frequency tuning
+        word has the additional effect of setting the phase accumulator to
+        0x800000.
+
+        Due to the way the clock generator operates, frequency tuning words
+        that are not powers of two cause jitter of one RTIO clock cycle at the
+        output.
+        """
+        syscall("ttl_clock_set", now_mu(), self.channel, frequency)
+        self.previous_timestamp = now_mu()
+
+    @kernel
+    def set(self, frequency):
+        """Like ``set_mu``, but using Hz."""
+        self.set_mu(self.frequency_to_ftw(frequency))
+
+    @kernel
+    def stop(self):
+        """Stop the toggling of the clock and set the output level to 0."""
+        self.set_mu(0)
+
+    @kernel
+    def sync(self):
+        """Busy-wait until all programmed frequency switches and stops have
+        been effected."""
+        while syscall("rtio_get_counter") < self.o_previous_timestamp:
+            pass
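An illustrative kernel fragment using the renamed and added TTL methods; the device names (ttl0, ttl1), the unit constants, and the surrounding experiment class are assumptions, not part of this commit:

    @kernel
    def blink_and_count(self):
        self.ttl0.pulse(1*us)         # seconds flavour
        self.ttl0.pulse_mu(125)       # machine-unit flavour, 1 us at 8 ns
        self.ttl1.gate_rising(10*ms)  # open the input gate for 10 ms
        return self.ttl1.count()      # number of rising edges seen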
@@ -2,7 +2,7 @@ import logging
 import ctypes
 import struct
 
-from artiq.language.units import dB, check_unit, Quantity
+from artiq.language.units import dB
 
 
 logger = logging.getLogger("lda")
@@ -47,14 +47,7 @@ class Ldasim:
         """
 
         step = self.get_att_step_size()
-
-        if isinstance(attenuation, Quantity):
-            check_unit(attenuation, "dB")
-            att = attenuation
-        else:
-            att = attenuation*dB
-
-        att = round(att/step)*step
+        att = round(attenuation/step)*step
 
         if att > self.get_att_max():
             raise ValueError("Cannot set attenuation {} > {}"
@@ -62,7 +55,7 @@ class Ldasim:
         elif att < 0*dB:
             raise ValueError("Cannot set attenuation {} < 0".format(att))
         else:
-            att = round(att.amount*4)/4. * dB
+            att = round(att*4)/4. * dB
             self._attenuation = att
 
     def ping(self):
@@ -117,7 +110,7 @@ class Lda:
                               self._product_ids[self.product],
                               self.serial)
         if not self._dev:
-            raise IOError
+            raise IOError("Device not found")
 
     def close(self):
         """Close the device."""
@@ -218,14 +211,7 @@ class Lda:
         """
 
         step = self.get_att_step_size()
-
-        if isinstance(attenuation, Quantity):
-            check_unit(attenuation, "dB")
-            att = attenuation
-        else:
-            att = attenuation*dB
-
-        att = round(att/step)*step
+        att = round(attenuation/step)*step
 
         if att > self.get_att_max():
             raise ValueError("Cannot set attenuation {} > {}"
@@ -233,7 +219,7 @@ class Lda:
         elif att < 0*dB:
             raise ValueError("Cannot set attenuation {} < 0".format(att))
         else:
-            self.set(0x8d, bytes([int(round(att.amount*4))]))
+            self.set(0x8d, bytes([int(round(att*4))]))
 
     def ping(self):
         try:
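With Quantity and check_unit gone, attenuations are now plain numbers scaled by dB. A hedged usage sketch (the setter name set_attenuation is inferred from the driver, and device construction is omitted):

    from artiq.language.units import dB

    def quiet_down(dev, attenuation=10.5):
        # dev is an Ldasim or Lda instance; the value is rounded to the
        # device step size by the driver itself
        dev.set_attenuation(attenuation*dB)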
@@ -15,10 +15,26 @@ class UnexpectedResponse(Exception):
 
 
 class Novatech409B:
-    """Driver for Novatech 409B 4-channel DDS"""
+    """Driver for Novatech 409B 4-channel DDS.
 
-    # maximum frequency of Novatech 409B when using PLL and external reference
-    max_freq_with_pll = 171.1276031
+    All output channels are in range [0, 1, 2, 3].
+    All frequencies are in Hz.
+    All phases are in turns.
+    All amplitudes are in volts.
+    """
+
+    error_codes = {
+        "?0": "Unrecognized Command",
+        "?1": "Bad Frequency",
+        "?2": "Bad AM Command",
+        "?3": "Input line too long",
+        "?4": "Bad Phase",
+        "?5": "Bad Time",
+        "?6": "Bad Mode",
+        "?7": "Bad Amp",
+        "?8": "Bad Constant",
+        "?f": "Bad Byte"
+    }
 
     def __init__(self, serial_dev):
         if serial_dev is None:
@@ -32,68 +48,67 @@ class Novatech409B:
                 parity="N",
                 stopbits=1,
                 xonxoff=0,
-                timeout=0.2)
+                timeout=1.0)
             self.setup()
 
     def close(self):
-        """Close the serial port"""
+        """Close the serial port."""
         if not self.simulation:
             self.port.close()
 
     def _ser_send(self, cmd, get_response=True):
-        """send a string to the serial port
+        """Send a string to the serial port."""
 
-        Routine for sending serial commands to device. It sends strings
-        and listens for a response terminated by a carriage return.
-
-        example:
-        ser_send("F0 1.0") # sets the freq of channel 0 to 1.0 MHz
-
-        :param cmd: a character string to send to device
-        :returns: None
-        """
+        # Low-level routine for sending serial commands to device. It sends
+        # strings and listens for a response terminated by a carriage return.
+        # example:
+        # ser_send("F0 1.0") # sets the freq of channel 0 to 1.0 MHz
         if self.simulation:
             print(cmd)
         else:
-            self.port.flush()
+            self.port.flushInput()
             self.port.write((cmd + "\r\n").encode())
+            result = self.port.readline().rstrip().decode()
             if get_response:
-                result = self.port.readline().rstrip().decode()
-                if result != "OK":
-                    raise UnexpectedResponse(result)
+                logger.debug("got response from device: %s", result)
+                if result == "OK":
+                    pass
+                elif result == "":
+                    raise UnexpectedResponse("Response from device timed out")
+                else:
+                    try:
+                        errstr = self.error_codes[result]
+                    except KeyError:
+                        errstr = "Unrecognized reply: '{}'".format(result)
+                    s = "Error Code = {ec}, {ecs}".format(ec=result, ecs=errstr)
+                    raise UnexpectedResponse(s)
+            else:
+                pass
 
     def reset(self):
-        """command hardware reset of 409B
-
-        returns: None
-        """
+        """Hardware reset of 409B."""
         self._ser_send("R", get_response=False)
         time.sleep(1)
         self.setup()
 
     def setup(self):
-        """initial setup of 409B
+        """Initial setup of 409B."""
 
-        Setup the Novatech 409B with the following defaults.
-        * command echo off ("E d")
-        * external clock ("") 10 MHz sinusoid -1 to +7 dBm
-
-        :returns: None
-        """
-        # disable command echo
+        # Setup the Novatech 409B with the following defaults:
+        # * command echo off ("E d")
+        # * external clock ("") 10 MHz sinusoid -1 to +7 dBm
         self._ser_send("E d", get_response=False)
         self.set_phase_continuous(True)
         self.set_simultaneous_update(False)
 
     def save_state_to_eeprom(self):
-        """save current state to EEPROM
-
-        Saves current state into EEPROM and sets valid flag.
-        State used as default upon next power up or reset. """
+        """Save current state to EEPROM."""
         self._ser_send("S")
 
     def set_phase_continuous(self, is_continuous):
-        """toggle phase continuous mode
+        """Toggle phase continuous mode.
 
         Sends the "M n" command. This turns off the automatic
         clearing of the phase register. In this mode, the phase
@@ -109,7 +124,9 @@ class Novatech409B:
             self._ser_send("M a")
 
     def set_simultaneous_update(self, simultaneous):
-        """Sends the "I m" command. In this mode an update
+        """Set simultaneous update mode.
+
+        Sends the "I m" command. In this mode an update
         pulse will not be sent to the DDS chip until
         an "I p" command is sent. This is useful when it is
         important to change all the outputs to new values
@@ -121,140 +138,75 @@ class Novatech409B:
             self._ser_send("I a")
 
     def set_freq(self, ch_no, freq):
-        """set_freq(ch_no,freq):
-        Set ch_no to frequency freq MHz"""
-        if ch_no < 0 or ch_no > 3:
-            raise ValueError("Incorrect channel number {}".format(ch_no))
-        if freq < 0.0 or freq > self.max_freq_with_pll:
-            raise ValueError("Incorrect frequency {}".format(freq))
-        # do this immediately, disable SimultaneousUpdate mode
+        """Set frequency of one channel."""
         self.set_simultaneous_update(False)
-        self._ser_send("F{:d} {:f}".format(ch_no, freq))
+        # Novatech expects MHz
+        self._ser_send("F{:d} {:f}".format(ch_no, freq/1e6))
 
     def set_phase(self, ch_no, phase):
-        """set DDS phase
-
-        :param ch_no: 0 to 3
-        :param phase: phase angle in cycles [0, 1]
-        :returns: None
-        """
-        if ch_no < 0 or ch_no > 3:
-            raise ValueError("Incorrect channel number {}".format(ch_no))
-        if phase < 0 or phase > 1:
-            raise ValueError("Incorrect phase {}".format(phase))
+        """Set phase of one channel."""
         # do this immediately, disable SimultaneousUpdate mode
         self.set_simultaneous_update(False)
         # phase word is required by device
         # N is an integer from 0 to 16383. Phase is set to
         # N*360/16384 deg; in ARTIQ represent phase in cycles [0, 1]
-        phase_word = round(phase*16384)
-        if phase_word >= 16384:
-            phase_word -= 16384
+        phase_word = round(phase*16383)
         cmd = "P{:d} {:d}".format(ch_no, phase_word)
         self._ser_send(cmd)
 
     def set_freq_all_phase_continuous(self, freq):
-        """set frequency of all channels simultaneously
+        """Set frequency of all channels simultaneously.
 
         Set frequency of all channels simultaneously.
         1) all DDSs are set to phase continuous mode
         2) all DDSs are simultaneously set to new frequency
         Together 1 and 2 ensure phase continuous frequency switching.
-
-        :param freq: frequency in MHz
-        :returns: None
         """
         self.set_simultaneous_update(True)
         self.set_phase_continuous(True)
-        for channel_num in range(4):
-            self.set_freq(channel_num, freq)
+        for i in range(4):
+            self.set_freq(i, freq)
         # send command necessary to update all channels at the same time
         self._ser_send("I p")
 
     def set_phase_all(self, phase):
-        """set phase of all DDS channels simultaneously
-
-        Set phase of all DDS channels at the same time. For example,::
-            set_phase_all([0, .25, 0.5, 0.75])
-
-        :param phase: vector of four phases (in cycles [0, 1])
-        :returns: None
-        """
+        """Set phase of all channels simultaneously."""
         self.set_simultaneous_update(True)
         # Note that this only works if the continuous
         # phase switching is turned off.
         self.set_phase_continuous(False)
-        for ch_no in range(4):
-            self.set_phase(ch_no, phase[ch_no])
+        for i in range(4):
+            self.set_phase(i, phase)
         # send command necessary to update all channels at the same time
         self._ser_send("I p")
 
-    def freq_sweep_all_phase_continuous(self, f0, f1, t):
-        """ sweep phase of all DDSs, phase continuous
-
-        Sweep frequency in a phase continuous fashion.
-
-        :param f0: starting frequency (MHz)
-        :param f1: ending frequency (MHz)
-        :param t: sweep duration (seconds)
-        :returns: None
-        """
-        # TODO: consider using artiq.language.units
-        if f0 == f1:
-            return
-        # get sign of sweep
-        if f1 > f0:
-            df_sign = 1
-        else:
-            df_sign = -1
-
-        self.set_phase_continuous(True)
-        self.set_simultaneous_update(True)
-        # calculate delay
-        # note that a single call to self.set_freq_all_phase_continuous()
-        # takes time t_for_one_freq_set; fix duration empirically
-        t_for_one_freq_set = 0.264
-        dt = t_for_one_freq_set
-        n_steps = int(math.ceil(t/dt))
-        df = abs(f0-f1)/n_steps
-        for n in range(n_steps):
-            fnow = f0+n*df_sign*df
-            self.set_freq_all_phase_continuous(fnow)
-        self.set_freq_all_phase_continuous(f1)
-
-    def output_scale(self, ch_no, frac):
-        """changes amplitude of a DDS
-
-        :param ch_no: DDS channel 0, 1, 2 or 3
-        :param frac: 0 to 1 (full attenuation to no attenuation)
-        :returns: None
-        """
+    def set_gain(self, ch_no, volts):
+        """Set amplitude of one channel."""
+        # due to error in Novatech it doesn't generate an error for
+        # dac_value>1024, so need to trap.
+        dac_value = int(math.floor(volts/0.51*1024))
+        if dac_value < 0 or dac_value > 1023:
+            s = "Amplitude out of range {v}".format(v=volts)
+            raise ValueError(s)
+
         self.set_simultaneous_update(False)
-        dac_ch_no = int(math.floor(frac*1024))
-        s = "V{:d} {:d}".format(ch_no, dac_ch_no)
+        s = "V{:d} {:d}".format(ch_no, dac_value)
         self._ser_send(s)
 
-    def output_scale_all(self, frac):
-        """changes amplitude of all DDSs
-
-        :param frac: 0 to 1 (full attenuation to no attenuation)
-        """
-        for ch_no in range(4):
-            self.output_scale(ch_no, frac)
-
-    def output_on_off(self, ch_no, on):
-        """turns on or off the DDS
-
-        :param ch_no: DDS channel 0, 1, 2 or 3
-        """
-        if on:
-            self.output_scale(ch_no, 1.0)
-        else:
-            self.output_scale(ch_no, 0.0)
-
-    def output_on_off_all(self, on):
-        """turns on or off the all the DDSs"""
-        if on:
-            self.output_scale_all(1.0)
-        else:
-            self.output_scale_all(0.0)
+    def get_status(self):
+        if self.simulation:
+            return ["00989680 2000 01F5 0000 00000000 00000000 000301",
+                    "00989680 2000 01F5 0000 00000000 00000000 000301",
+                    "00989680 2000 01F5 0000 00000000 00000000 000301",
+                    "00989680 2000 01F5 0000 00000000 00000000 000301",
+                    "80 BC0000 0000 0102 21"]
+        else:
+            # status message is multi-line
+            self.port.flushInput()
+            self.port.write(("QUE" + "\r\n").encode())
+            result = self.port.readlines()
+            result = [r.rstrip().decode() for r in result]
+            logger.debug("got device status: %s", result)
+            return result
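A short usage sketch reflecting the new Hz/turns/volts conventions documented in the class docstring (illustrative only; passing None as the serial device selects the driver's simulation mode, as suggested by the `if serial_dev is None:` branch above):

    dev = Novatech409B(None)   # simulation mode: commands are printed
    dev.set_freq(0, 10e6)      # 10 MHz given in Hz, sent as "F0 10.000000"
    dev.set_phase(1, 0.25)     # quarter turn -> phase word round(0.25*16383)
    dev.set_gain(2, 0.25)      # amplitude in volts (full scale ~0.51 V)
    dev.close()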
@ -1,7 +1,5 @@
|
||||||
from artiq.language.core import *
|
from artiq.language.core import *
|
||||||
from artiq.language.db import *
|
|
||||||
from artiq.language.units import *
|
from artiq.language.units import *
|
||||||
from artiq.coredevice import ttl
|
|
||||||
|
|
||||||
|
|
||||||
frame_setup = 20*ns
|
frame_setup = 20*ns
|
||||||
|
@ -76,7 +74,7 @@ class _Frame:
|
||||||
self.pdq = pdq
|
self.pdq = pdq
|
||||||
self.frame_number = frame_number
|
self.frame_number = frame_number
|
||||||
self.segments = []
|
self.segments = []
|
||||||
self.segment_count = 0
|
self.segment_count = 0 # == len(self.segments), used in kernel
|
||||||
|
|
||||||
self.invalidated = False
|
self.invalidated = False
|
||||||
|
|
||||||
|
@ -99,7 +97,7 @@ class _Frame:
|
||||||
|
|
||||||
def _arm(self):
|
def _arm(self):
|
||||||
self.segment_delays = [
|
self.segment_delays = [
|
||||||
time_to_cycles(s.get_duration()*delay_margin_factor, self.core)
|
seconds_to_mu(s.get_duration()*delay_margin_factor, self.core)
|
||||||
for s in self.segments]
|
for s in self.segments]
|
||||||
|
|
||||||
def _invalidate(self):
|
def _invalidate(self):
|
||||||
|
@ -125,7 +123,8 @@ class _Frame:
|
||||||
if not self.pdq.armed:
|
if not self.pdq.armed:
|
||||||
raise ArmError
|
raise ArmError
|
||||||
|
|
||||||
t = time_to_cycles(now()) - time_to_cycles(trigger_duration/2)
|
call_t = now_mu()
|
||||||
|
trigger_start_t = call_t - seconds_to_mu(trigger_duration/2)
|
||||||
|
|
||||||
if self.pdq.current_frame >= 0:
|
if self.pdq.current_frame >= 0:
|
||||||
# PDQ is in the middle of a frame. Check it is us.
|
# PDQ is in the middle of a frame. Check it is us.
|
||||||
|
@ -136,15 +135,16 @@ class _Frame:
|
||||||
# to play our first segment.
|
# to play our first segment.
|
||||||
self.pdq.current_frame = self.frame_number
|
self.pdq.current_frame = self.frame_number
|
||||||
self.pdq.next_segment = 0
|
self.pdq.next_segment = 0
|
||||||
t2 = t - time_to_cycles(frame_setup)
|
at_mu(trigger_start_t - seconds_to_mu(frame_setup))
|
||||||
self.pdq.frame0.set_value(t2, self.frame_number & 1)
|
self.pdq.frame0.set_o(bool(self.frame_number & 1))
|
||||||
self.pdq.frame1.set_value(t2, (self.frame_number & 2) >> 1)
|
self.pdq.frame1.set_o(bool((self.frame_number & 2) >> 1))
|
||||||
self.pdq.frame2.set_value(t2, (self.frame_number & 4) >> 2)
|
self.pdq.frame2.set_o(bool((self.frame_number & 4) >> 2))
|
||||||
|
|
||||||
self.pdq.trigger.on(t)
|
at_mu(trigger_start_t)
|
||||||
self.pdq.trigger.off(t + time_to_cycles(trigger_duration))
|
self.pdq.trigger.pulse(trigger_duration)
|
||||||
|
|
||||||
delay(cycles_to_time(self.segment_delays[self.pdq.next_segment]))
|
at_mu(call_t)
|
||||||
|
delay_mu(self.segment_delays[self.pdq.next_segment])
|
||||||
self.pdq.next_segment += 1
|
self.pdq.next_segment += 1
|
||||||
|
|
||||||
# test for end of frame
|
# test for end of frame
|
||||||
|
@ -153,23 +153,14 @@ class _Frame:
|
||||||
self.pdq.next_segment = -1
|
self.pdq.next_segment = -1
|
||||||
|
|
||||||
|
|
||||||
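The rewrite above moves from wall-clock helpers (time_to_cycles, cycles_to_time, on/off with explicit timestamps) to the machine-unit API (now_mu, at_mu, delay_mu, seconds_to_mu, set_o, pulse). The toy model below mimics that cursor-based timeline with plain integers so the save/restore pattern around the trigger pulse can be followed outside a kernel; the helper names here are stand-ins, not the real artiq.language.core functions.

class ToyTimeline:
    """Integer timeline cursor mimicking the coarse RTIO model."""
    def __init__(self, ref_period=1e-9):
        self.cursor = 0          # current position, in machine units (mu)
        self.ref_period = ref_period

    def now_mu(self):
        return self.cursor

    def at_mu(self, t):
        self.cursor = t          # jump the cursor to an absolute time

    def delay_mu(self, dt):
        self.cursor += dt        # advance the cursor by a relative amount

    def seconds_to_mu(self, t):
        return int(round(t / self.ref_period))


tl = ToyTimeline()
trigger_duration = 50e-9
frame_setup = 20e-9

call_t = tl.now_mu()                                       # remember the call time
trigger_start_t = call_t - tl.seconds_to_mu(trigger_duration / 2)

tl.at_mu(trigger_start_t - tl.seconds_to_mu(frame_setup))  # frame lines settle first
tl.at_mu(trigger_start_t)                                  # then the trigger pulse
tl.at_mu(call_t)                                           # restore the cursor
tl.delay_mu(1000)                                          # and advance by a segment delay
print(tl.now_mu())                                         # 1000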
class CompoundPDQ2(AutoDB):
|
class CompoundPDQ2:
|
||||||
class DBKeys:
|
def __init__(self, dmgr, pdq2_devices, trigger_device, frame_devices):
|
||||||
core = Device()
|
self.core = dmgr.get("core")
|
||||||
pdq2_devices = Argument()
|
self.pdq2s = [dmgr.get(d) for d in self.pdq2_devices]
|
||||||
rtio_trigger = Argument()
|
self.trigger = dmgr.get(trigger_device)
|
||||||
rtio_frame = Argument()
|
self.frame0 = dmgr.get(frame_devices[0])
|
||||||
|
self.frame1 = dmgr.get(frame_devices[1])
|
||||||
def build(self):
|
self.frame2 = dmgr.get(frame_devices[2])
|
||||||
self.pdq2s = [self.dbh.get_device(d) for d in self.pdq2_devices]
|
|
||||||
self.trigger = ttl.LLTTLOut(
|
|
||||||
core=self.core, channel=self.rtio_trigger)
|
|
||||||
self.frame0 = ttl.LLTTLOut(
|
|
||||||
core=self.core, channel=self.rtio_frame[0])
|
|
||||||
self.frame1 = ttl.LLTTLOut(
|
|
||||||
core=self.core, channel=self.rtio_frame[1])
|
|
||||||
self.frame2 = ttl.LLTTLOut(
|
|
||||||
core=self.core, channel=self.rtio_frame[2])
|
|
||||||
|
|
||||||
self.frames = []
|
self.frames = []
|
||||||
self.current_frame = -1
|
self.current_frame = -1
|
||||||
|
@ -187,6 +178,7 @@ class CompoundPDQ2(AutoDB):
|
||||||
raise ArmError
|
raise ArmError
|
||||||
for frame in self.frames:
|
for frame in self.frames:
|
||||||
frame._arm()
|
frame._arm()
|
||||||
|
self.armed = True
|
||||||
|
|
||||||
full_program = [f._get_program() for f in self.frames]
|
full_program = [f._get_program() for f in self.frames]
|
||||||
for n, pdq2 in enumerate(self.pdq2s):
|
for n, pdq2 in enumerate(self.pdq2s):
|
||||||
@ -1,9 +1,14 @@
|
||||||
# Yann Sionneau <ys@m-labs.hk>, 2015
|
# Yann Sionneau <ys@m-labs.hk>, 2015
|
||||||
|
|
||||||
from ctypes import byref
|
from ctypes import byref, c_ulong
|
||||||
|
import logging
|
||||||
|
|
||||||
import numpy as np
|
import numpy as np
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class DAQmxSim:
|
class DAQmxSim:
|
||||||
def load_sample_values(self, values):
|
def load_sample_values(self, values):
|
||||||
pass
|
pass
|
||||||
|
@ -11,61 +16,124 @@ class DAQmxSim:
|
||||||
def close(self):
|
def close(self):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
def ping(self):
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
class DAQmx:
|
class DAQmx:
|
||||||
"""NI PXI6733 DAQ interface."""
|
"""NI PXI6733 DAQ interface."""
|
||||||
|
|
||||||
def __init__(self, device, analog_output, clock):
|
def __init__(self, channels, clock):
|
||||||
import PyDAQmx as daq
|
"""
|
||||||
|
:param channels: List of channels as a string, following
|
||||||
|
the NI-DAQmx syntax for physical channel lists and ranges.
|
||||||
|
|
||||||
self.device = device
|
Example: Dev1/ao0, Dev1/ao1:ao3
|
||||||
self.analog_output = analog_output
|
:param clock: Clock source terminal as a string, following
|
||||||
self.clock = clock
|
NI-DAQmx terminal names syntax.
|
||||||
self.tasks = []
|
|
||||||
self.daq = daq
|
|
||||||
|
|
||||||
def done_callback_py(self, taskhandle, status, callback_data):
|
Example: PFI5
|
||||||
self.daq.DAQmxClearTask(taskhandle)
|
|
||||||
self.tasks.remove(taskhandle)
|
|
||||||
|
|
||||||
def load_sample_values(self, values):
|
|
||||||
"""Load sample values into PXI 6733 device.
|
|
||||||
|
|
||||||
This loads sample values into the PXI 6733 device and then
|
|
||||||
configures a task to output those samples at each clock rising
|
|
||||||
edge.
|
|
||||||
|
|
||||||
A callback is registered to clear the task (deallocate resources)
|
|
||||||
when the task has completed.
|
|
||||||
|
|
||||||
:param values: A numpy array of sample values to load in the device.
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
import PyDAQmx as daq
|
||||||
|
|
||||||
|
self.channels = channels.encode()
|
||||||
|
self.clock = clock.encode()
|
||||||
|
self.task = None
|
||||||
|
self.daq = daq
|
||||||
|
|
||||||
|
def _done_callback(self, taskhandle, status, callback_data):
|
||||||
|
if taskhandle != self.task:
|
||||||
|
logger.warning("done callback called with unexpected task")
|
||||||
|
else:
|
||||||
|
self.clear_pending_task()
|
||||||
|
|
||||||
|
def ping(self):
|
||||||
|
try:
|
||||||
|
data = (c_ulong*1)()
|
||||||
|
self.daq.DAQmxGetDevSerialNum(self.device, data)
|
||||||
|
except:
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
def load_sample_values(self, sampling_freq, values):
|
||||||
|
"""Load sample values into PXI 6733 device.
|
||||||
|
|
||||||
|
This loads sample values into the PXI 6733 device.
|
||||||
|
The device will output samples at each clock rising edge.
|
||||||
|
The device waits for a clock rising edge to output the first sample.
|
||||||
|
|
||||||
|
When using several channels simultaneously, you can either concatenate
|
||||||
|
the values for the different channels in a 1-dimensional ``values``
|
||||||
|
numpy ndarray.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
|
||||||
|
>>> values = np.array([ch0_samp0, ch0_samp1, ch1_samp0, ch1_samp1],
|
||||||
|
dtype=float)
|
||||||
|
|
||||||
|
In this example the first two samples will be output via the first
|
||||||
|
channel and the two following samples will be output via the second
|
||||||
|
channel.
|
||||||
|
|
||||||
|
Or you can use a 2-dimensional numpy ndarray like this:
|
||||||
|
|
||||||
|
>>> values = np.array([[ch0_samp0, ch0_samp1],[ch1_samp0, ch1_samp1]],
|
||||||
|
dtype=float)
|
||||||
|
|
||||||
|
Any call to this method will cancel any previous task even if it has
|
||||||
|
not yet completed.
|
||||||
|
|
||||||
|
:param sampling_freq: The sampling frequency in samples per second.
|
||||||
|
:param values: A numpy ndarray of sample values (in volts) to load in
|
||||||
|
the device.
|
||||||
|
"""
|
||||||
|
|
||||||
|
self.clear_pending_task()
|
||||||
|
values = values.flatten()
|
||||||
t = self.daq.Task()
|
t = self.daq.Task()
|
||||||
t.CreateAOVoltageChan(self.device+b"/"+self.analog_output, b"",
|
t.CreateAOVoltageChan(self.channels, b"",
|
||||||
min(values), max(values),
|
min(values), max(values),
|
||||||
self.daq.DAQmx_Val_Volts, None)
|
self.daq.DAQmx_Val_Volts, None)
|
||||||
t.CfgSampClkTiming(self.clock, 1000.0, self.daq.DAQmx_Val_Rising,
|
|
||||||
self.daq.DAQmx_Val_FiniteSamps, len(values))
|
channel_number = (c_ulong*1)()
|
||||||
|
t.GetTaskNumChans(channel_number)
|
||||||
|
nb_values = len(values)
|
||||||
|
if nb_values % channel_number[0]:
|
||||||
|
self.daq.DAQmxClearTask(t.taskHandle)
|
||||||
|
raise ValueError("The size of the values array must be a multiple "
|
||||||
|
"of the number of channels ({})"
|
||||||
|
.format(channel_number[0]))
|
||||||
|
samps_per_channel = nb_values // channel_number[0]
|
||||||
|
|
||||||
|
t.CfgSampClkTiming(self.clock, sampling_freq,
|
||||||
|
self.daq.DAQmx_Val_Rising,
|
||||||
|
self.daq.DAQmx_Val_FiniteSamps, samps_per_channel)
|
||||||
num_samps_written = self.daq.int32()
|
num_samps_written = self.daq.int32()
|
||||||
values = np.require(values, dtype=float,
|
values = np.require(values, dtype=float,
|
||||||
requirements=["C_CONTIGUOUS", "WRITEABLE"])
|
requirements=["C_CONTIGUOUS", "WRITEABLE"])
|
||||||
ret = t.WriteAnalogF64(len(values), False, 0,
|
ret = t.WriteAnalogF64(samps_per_channel, False, 0,
|
||||||
self.daq.DAQmx_Val_GroupByChannel, values,
|
self.daq.DAQmx_Val_GroupByChannel, values,
|
||||||
byref(num_samps_written), None)
|
byref(num_samps_written), None)
|
||||||
if num_samps_written.value != len(values):
|
if num_samps_written.value != nb_values:
|
||||||
raise IOError("Error: only {} sample values were written"
|
raise IOError("Error: only {} sample values were written"
|
||||||
.format(num_samps_written.value))
|
.format(num_samps_written.value))
|
||||||
if ret:
|
if ret:
|
||||||
raise IOError("Error while writing samples to the channel buffer")
|
raise IOError("Error while writing samples to the channel buffer")
|
||||||
|
|
||||||
done_cb = self.daq.DAQmxDoneEventCallbackPtr(self.done_callback_py)
|
done_cb = self.daq.DAQmxDoneEventCallbackPtr(self._done_callback)
|
||||||
self.tasks.append(t.taskHandle)
|
self.task = t.taskHandle
|
||||||
self.daq.DAQmxRegisterDoneEvent(t.taskHandle, 0, done_cb, None)
|
self.daq.DAQmxRegisterDoneEvent(t.taskHandle, 0, done_cb, None)
|
||||||
t.StartTask()
|
t.StartTask()
|
||||||
|
|
||||||
def close(self):
|
def clear_pending_task(self):
|
||||||
"""Clear all pending tasks."""
|
"""Clear any pending task."""
|
||||||
|
|
||||||
for t in self.tasks:
|
if self.task is not None:
|
||||||
self.daq.DAQmxClearTask(t)
|
self.daq.DAQmxClearTask(self.task)
|
||||||
|
self.task = None
|
||||||
|
|
||||||
|
def close(self):
|
||||||
|
"""Free any allocated resources."""
|
||||||
|
|
||||||
|
self.clear_pending_task()
|
||||||
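Putting the reworked constructor and load_sample_values() together, a usage sketch might look like the lines below. The device path "Dev1/ao0:ao1", the clock terminal "PFI5", the 1 kHz sample rate and the waveform itself are placeholders, and the import path is an assumption rather than something stated in this diff, so the hardware calls are left commented out.

import numpy as np

# from artiq.devices.pxi6733.driver import DAQmx   # import path is an assumption

# Two analog outputs clocked from an external terminal (placeholder names).
# daq = DAQmx("Dev1/ao0:ao1", "PFI5")

# One row per channel; each column is output on a clock rising edge.
values = np.array([[0.0, 0.5, 1.0, 0.5],
                   [1.0, 0.5, 0.0, 0.5]], dtype=float)

# daq.load_sample_values(1e3, values)   # 1 kS/s per channel
# daq.close()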
@ -0,0 +1,177 @@
|
||||||
|
import numpy as np
|
||||||
|
|
||||||
|
from artiq.language.core import *
|
||||||
|
from artiq.language.units import *
|
||||||
|
from artiq.wavesynth.compute_samples import Synthesizer
|
||||||
|
|
||||||
|
|
||||||
|
class SegmentSequenceError(Exception):
|
||||||
|
"""Raised when attempting to play back a named segment which is not the
|
||||||
|
next in the sequence."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class InvalidatedError(Exception):
|
||||||
|
"""Raised when attemting to use a frame or segment that has been
|
||||||
|
invalidated (due to disarming the DAQmx)."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class ArmError(Exception):
|
||||||
|
"""Raised when attempting to arm an already armed DAQmx, to modify the
|
||||||
|
program of an armed DAQmx, or to play a segment on a disarmed DAQmx."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def _ceil_div(a, b):
|
||||||
|
return (a + b - 1)//b
|
||||||
|
|
||||||
|
|
||||||
|
def _compute_duration_mu(nsamples, ftw, acc_width):
|
||||||
|
# This returns the precise duration so that the clock can be stopped
|
||||||
|
# exactly at the next rising edge (RTLink commands take precedence over
|
||||||
|
# toggling from the accumulator).
|
||||||
|
# If segments are played continuously, replacement of the stop command
|
||||||
|
# will keep the clock running. If the FTW is not a power of two, note that
|
||||||
|
# the accumulator is reset at that time, which causes jitter and frequency
|
||||||
|
# inaccuracy.
|
||||||
|
# Formally:
|
||||||
|
# duration*ftw >= nsamples*2**acc_width
|
||||||
|
# (duration - 1)*ftw < nsamples*2**acc_width
|
||||||
|
return _ceil_div(nsamples*2**acc_width, ftw)
|
||||||
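The two inequalities in the comment pin down the duration as ceil(nsamples*2**acc_width / ftw); a small numerical check with made-up ftw and acc_width values is shown below.

def _ceil_div(a, b):
    return (a + b - 1)//b

def _compute_duration_mu(nsamples, ftw, acc_width):
    return _ceil_div(nsamples*2**acc_width, ftw)

# Made-up numbers: 16-bit accumulator, FTW of 6000, 100 samples.
nsamples, ftw, acc_width = 100, 6000, 16
d = _compute_duration_mu(nsamples, ftw, acc_width)
assert d*ftw >= nsamples*2**acc_width          # long enough for all samples
assert (d - 1)*ftw < nsamples*2**acc_width     # but not one clock cycle longer
print(d)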
|
|
||||||
|
|
||||||
|
class _Segment:
|
||||||
|
def __init__(self, frame, segment_number):
|
||||||
|
self.frame = frame
|
||||||
|
self.segment_number = segment_number
|
||||||
|
|
||||||
|
self.lines = []
|
||||||
|
|
||||||
|
# for @kernel
|
||||||
|
self.core = frame.daqmx.core
|
||||||
|
|
||||||
|
def add_line(self, duration, channel_data):
|
||||||
|
if self.frame.invalidated:
|
||||||
|
raise InvalidatedError
|
||||||
|
if self.frame.daqmx.armed:
|
||||||
|
raise ArmError
|
||||||
|
self.lines.append((duration, channel_data))
|
||||||
|
|
||||||
|
@kernel
|
||||||
|
def advance(self):
|
||||||
|
if self.frame.invalidated:
|
||||||
|
raise InvalidatedError
|
||||||
|
if not self.frame.daqmx.armed:
|
||||||
|
raise ArmError
|
||||||
|
# If the frame is currently being played, check that we are next.
|
||||||
|
if (self.frame.daqmx.next_segment >= 0
|
||||||
|
and self.frame.daqmx.next_segment != self.segment_number):
|
||||||
|
raise SegmentSequenceError
|
||||||
|
self.frame.advance()
|
||||||
|
|
||||||
|
|
||||||
|
class _Frame:
|
||||||
|
def __init__(self, daqmx):
|
||||||
|
self.daqmx = daqmx
|
||||||
|
self.segments = []
|
||||||
|
self.segment_count = 0 # == len(self.segments), used in kernel
|
||||||
|
|
||||||
|
self.invalidated = False
|
||||||
|
|
||||||
|
# for @kernel
|
||||||
|
self.core = self.daqmx.core
|
||||||
|
|
||||||
|
def create_segment(self, name=None):
|
||||||
|
if self.invalidated:
|
||||||
|
raise InvalidatedError
|
||||||
|
if self.daqmx.armed:
|
||||||
|
raise ArmError
|
||||||
|
segment = _Segment(self, self.segment_count)
|
||||||
|
if name is not None:
|
||||||
|
if hasattr(self, name):
|
||||||
|
raise NameError("Segment name already exists")
|
||||||
|
setattr(self, name, segment)
|
||||||
|
self.segments.append(segment)
|
||||||
|
self.segment_count += 1
|
||||||
|
return segment
|
||||||
|
|
||||||
|
def _arm(self):
|
||||||
|
self.segment_delays = [
|
||||||
|
_compute_duration_mu(s.get_sample_count(),
|
||||||
|
self.daqmx.sample_rate,
|
||||||
|
self.daqmx.clock.acc_width)
|
||||||
|
for s in self.segments]
|
||||||
|
|
||||||
|
def _invalidate(self):
|
||||||
|
self.invalidated = True
|
||||||
|
|
||||||
|
def _get_samples(self):
|
||||||
|
program = [
|
||||||
|
{
|
||||||
|
"dac_divider": 1,
|
||||||
|
"duration": duration,
|
||||||
|
"channel_data": channel_data,
|
||||||
|
} for segment in self.segments
|
||||||
|
for duration, channel_data in segment.lines]
|
||||||
|
synth = Synthesizer(self.daqmx.channel_count, program)
|
||||||
|
synth.select(0)
|
||||||
|
# not setting any trigger flag in the program causes the whole
|
||||||
|
# waveform to be computed here for all segments.
|
||||||
|
# slicing the segments is done by stopping the clock.
|
||||||
|
return synth.trigger()
|
||||||
|
|
||||||
|
@kernel
|
||||||
|
def advance(self):
|
||||||
|
if self.invalidated:
|
||||||
|
raise InvalidatedError
|
||||||
|
if not self.daqmx.armed:
|
||||||
|
raise ArmError
|
||||||
|
|
||||||
|
self.daqmx.clock.set(self.daqmx.sample_rate)
|
||||||
|
delay_mu(self.segment_delays[self.daqmx.next_segment])
|
||||||
|
self.daqmx.next_segment += 1
|
||||||
|
self.daqmx.clock.stop()
|
||||||
|
|
||||||
|
# test for end of frame
|
||||||
|
if self.daqmx.next_segment == self.segment_count:
|
||||||
|
self.daqmx.next_segment = -1
|
||||||
|
|
||||||
|
|
||||||
|
class CompoundDAQmx:
|
||||||
|
def __init__(self, dmgr, daqmx_device, clock_device, channel_count,
|
||||||
|
sample_rate, sample_rate_in_mu=False):
|
||||||
|
self.core = dmgr.get("core")
|
||||||
|
self.daqmx = dmgr.get(daqmx_device)
|
||||||
|
self.clock = dmgr.get(clock_device)
|
||||||
|
self.channel_count = channel_count
|
||||||
|
if sample_rate_in_mu:
|
||||||
|
self.sample_rate = sample_rate
|
||||||
|
else:
|
||||||
|
self.sample_rate = self.clock.frequency_to_ftw(sample_rate)
|
||||||
|
|
||||||
|
self.frame = None
|
||||||
|
self.next_segment = -1
|
||||||
|
self.armed = False
|
||||||
|
|
||||||
|
def disarm(self):
|
||||||
|
if self.frame is not None:
|
||||||
|
self.frame._invalidate()
|
||||||
|
self.frame = None
|
||||||
|
self.armed = False
|
||||||
|
|
||||||
|
def arm(self):
|
||||||
|
if self.armed:
|
||||||
|
raise ArmError
|
||||||
|
if self.frame is not None:
|
||||||
|
self.frame._arm()
|
||||||
|
self.daqmx.load_sample_values(
|
||||||
|
self.clock.ftw_to_frequency(self.sample_rate),
|
||||||
|
np.array(self.frame._get_samples()))
|
||||||
|
self.armed = True
|
||||||
|
|
||||||
|
def create_frame(self):
|
||||||
|
if self.armed:
|
||||||
|
raise ArmError
|
||||||
|
self.frame = _Frame(self)
|
||||||
|
return self.frame
|
|
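Read together, the mediator classes above give the following workflow. This is a sketch rather than a runnable experiment: daqmx stands for a CompoundDAQmx instance obtained from the device manager, and line_data for channel data in the wavesynth program format, both of which are placeholders here.

def build_and_arm(daqmx, line_data):
    frame = daqmx.create_frame()
    seg = frame.create_segment("ramp")   # named segments become frame attributes
    seg.add_line(10e-3, line_data)       # (duration in seconds, channel data)
    daqmx.arm()                          # flattens the frame into samples and
                                         # loads them into the DAQmx device
    return frame

# On the kernel side, frame.ramp.advance() then starts the clock and waits
# the precomputed number of machine units before stopping it again.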
@ -4,7 +4,7 @@ import struct as st
|
||||||
|
|
||||||
import serial
|
import serial
|
||||||
|
|
||||||
from artiq.language.units import V, strip_unit
|
from artiq.language.units import V
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
@ -221,10 +221,10 @@ class Tcube:
|
||||||
def handle_message(self, msg):
|
def handle_message(self, msg):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def send_request(self, msgreq_id, msgget_id, param1=0, param2=0):
|
def send_request(self, msgreq_id, wait_for_msgs, param1=0, param2=0, data=None):
|
||||||
Message(msgreq_id, param1, param2).send(self.port)
|
Message(msgreq_id, param1, param2, data=data).send(self.port)
|
||||||
msg = msg_id = None
|
msg = msg_id = None
|
||||||
while msg is None or msg_id != msgget_id:
|
while msg is None or msg_id not in wait_for_msgs:
|
||||||
msg = Message.recv(self.port)
|
msg = Message.recv(self.port)
|
||||||
self.handle_message(msg)
|
self.handle_message(msg)
|
||||||
msg_id = msg.id
|
msg_id = msg.id
|
||||||
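The reworked send_request() waits for any message ID in a list instead of a single ID, and can also carry a data payload. A hedged sketch of the before/after calling convention, using message IDs that appear elsewhere in this diff:

# Old convention: block until exactly one reply ID arrives.
#     get_msg = self.send_request(MGMSG.MOD_REQ_CHANENABLESTATE,
#                                 MGMSG.MOD_GET_CHANENABLESTATE, 1)
#
# New convention: block until any listed reply ID arrives; this is what
# lets a homing command finish on either MOT_MOVE_HOMED or MOT_MOVE_STOPPED.
#     get_msg = self.send_request(MGMSG.MOD_REQ_CHANENABLESTATE,
#                                 [MGMSG.MOD_GET_CHANENABLESTATE], 1)
#     self.send_request(MGMSG.MOT_MOVE_HOME,
#                       [MGMSG.MOT_MOVE_HOMED, MGMSG.MOT_MOVE_STOPPED], 1)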
|
@ -247,7 +247,7 @@ class Tcube:
|
||||||
|
|
||||||
def get_channel_enable_state(self):
|
def get_channel_enable_state(self):
|
||||||
get_msg = self.send_request(MGMSG.MOD_REQ_CHANENABLESTATE,
|
get_msg = self.send_request(MGMSG.MOD_REQ_CHANENABLESTATE,
|
||||||
MGMSG.MOD_GET_CHANENABLESTATE, 1)
|
[MGMSG.MOD_GET_CHANENABLESTATE], 1)
|
||||||
self.chan_enabled = get_msg.param2
|
self.chan_enabled = get_msg.param2
|
||||||
if self.chan_enabled == 1:
|
if self.chan_enabled == 1:
|
||||||
self.chan_enabled = True
|
self.chan_enabled = True
|
||||||
|
@ -286,7 +286,7 @@ class Tcube:
|
||||||
|
|
||||||
def hardware_request_information(self):
|
def hardware_request_information(self):
|
||||||
return self.send_request(MGMSG.HW_REQ_INFO,
|
return self.send_request(MGMSG.HW_REQ_INFO,
|
||||||
MGMSG.HW_GET_INFO)
|
[MGMSG.HW_GET_INFO])
|
||||||
|
|
||||||
def is_channel_enabled(self):
|
def is_channel_enabled(self):
|
||||||
return self.chan_enabled
|
return self.chan_enabled
|
||||||
|
@ -302,7 +302,7 @@ class Tcube:
|
||||||
class Tpz(Tcube):
|
class Tpz(Tcube):
|
||||||
def __init__(self, serial_dev):
|
def __init__(self, serial_dev):
|
||||||
Tcube.__init__(self, serial_dev)
|
Tcube.__init__(self, serial_dev)
|
||||||
self.voltage_limit = self.get_tpz_io_settings()[0].amount
|
self.voltage_limit = self.get_tpz_io_settings()[0]
|
||||||
|
|
||||||
def handle_message(self, msg):
|
def handle_message(self, msg):
|
||||||
msg_id = msg.id
|
msg_id = msg.id
|
||||||
|
@ -355,7 +355,7 @@ class Tpz(Tcube):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
get_msg = self.send_request(MGMSG.PZ_REQ_POSCONTROLMODE,
|
get_msg = self.send_request(MGMSG.PZ_REQ_POSCONTROLMODE,
|
||||||
MGMSG.PZ_GET_POSCONTROLMODE, 1)
|
[MGMSG.PZ_GET_POSCONTROLMODE], 1)
|
||||||
return get_msg.param2
|
return get_msg.param2
|
||||||
|
|
||||||
def set_output_volts(self, voltage):
|
def set_output_volts(self, voltage):
|
||||||
|
@ -372,8 +372,6 @@ class Tpz(Tcube):
|
||||||
between the three values 75 V, 100 V and 150 V.
|
between the three values 75 V, 100 V and 150 V.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
voltage = strip_unit(voltage, "V")
|
|
||||||
|
|
||||||
if voltage < 0 or voltage > self.voltage_limit:
|
if voltage < 0 or voltage > self.voltage_limit:
|
||||||
raise ValueError("Voltage must be in range [0;{}]"
|
raise ValueError("Voltage must be in range [0;{}]"
|
||||||
.format(self.voltage_limit))
|
.format(self.voltage_limit))
|
||||||
|
@ -389,8 +387,8 @@ class Tpz(Tcube):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
get_msg = self.send_request(MGMSG.PZ_REQ_OUTPUTVOLTS,
|
get_msg = self.send_request(MGMSG.PZ_REQ_OUTPUTVOLTS,
|
||||||
MGMSG.PZ_GET_OUTPUTVOLTS, 1)
|
[MGMSG.PZ_GET_OUTPUTVOLTS], 1)
|
||||||
return st.unpack("<H", get_msg.data[2:])[0]*self.voltage_limit*V/32767
|
return st.unpack("<H", get_msg.data[2:])[0]*self.voltage_limit/32767
|
||||||
|
|
||||||
def set_output_position(self, position_sw):
|
def set_output_position(self, position_sw):
|
||||||
"""Set output position of the piezo actuator.
|
"""Set output position of the piezo actuator.
|
||||||
|
@ -417,7 +415,7 @@ class Tpz(Tcube):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
get_msg = self.send_request(MGMSG.PZ_REQ_OUTPUTPOS,
|
get_msg = self.send_request(MGMSG.PZ_REQ_OUTPUTPOS,
|
||||||
MGMSG.PZ_GET_OUTPUTPOS, 1)
|
[MGMSG.PZ_GET_OUTPUTPOS], 1)
|
||||||
return st.unpack("<H", get_msg.data[2:])[0]
|
return st.unpack("<H", get_msg.data[2:])[0]
|
||||||
|
|
||||||
def set_input_volts_source(self, volt_src):
|
def set_input_volts_source(self, volt_src):
|
||||||
|
@ -459,7 +457,7 @@ class Tpz(Tcube):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
get_msg = self.send_request(MGMSG.PZ_REQ_INPUTVOLTSSRC,
|
get_msg = self.send_request(MGMSG.PZ_REQ_INPUTVOLTSSRC,
|
||||||
MGMSG.PZ_GET_INPUTVOLTSSRC, 1)
|
[MGMSG.PZ_GET_INPUTVOLTSSRC], 1)
|
||||||
return st.unpack("<H", get_msg.data[2:])[0]
|
return st.unpack("<H", get_msg.data[2:])[0]
|
||||||
|
|
||||||
def set_pi_constants(self, prop_const, int_const):
|
def set_pi_constants(self, prop_const, int_const):
|
||||||
|
@ -488,7 +486,7 @@ class Tpz(Tcube):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
get_msg = self.send_request(MGMSG.PZ_REQ_PICONSTS,
|
get_msg = self.send_request(MGMSG.PZ_REQ_PICONSTS,
|
||||||
MGMSG.PZ_GET_PICONSTS, 1)
|
[MGMSG.PZ_GET_PICONSTS], 1)
|
||||||
return st.unpack("<HH", get_msg.data[2:])
|
return st.unpack("<HH", get_msg.data[2:])
|
||||||
|
|
||||||
def set_output_lut(self, lut_index, output):
|
def set_output_lut(self, lut_index, output):
|
||||||
|
@ -532,7 +530,6 @@ class Tpz(Tcube):
|
||||||
<artiq.devices.thorlabs.driver.Tpz.set_tpz_io_settings>` method.
|
<artiq.devices.thorlabs.driver.Tpz.set_tpz_io_settings>` method.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
output = strip_unit(output, "V")
|
|
||||||
volt = round(output*32767/self.voltage_limit)
|
volt = round(output*32767/self.voltage_limit)
|
||||||
payload = st.pack("<HHH", 1, lut_index, volt)
|
payload = st.pack("<HHH", 1, lut_index, volt)
|
||||||
self.send(Message(MGMSG.PZ_SET_OUTPUTLUT, data=payload))
|
self.send(Message(MGMSG.PZ_SET_OUTPUTLUT, data=payload))
|
||||||
|
@ -546,9 +543,9 @@ class Tpz(Tcube):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
get_msg = self.send_request(MGMSG.PZ_REQ_OUTPUTLUT,
|
get_msg = self.send_request(MGMSG.PZ_REQ_OUTPUTLUT,
|
||||||
MGMSG.PZ_GET_OUTPUTLUT, 1)
|
[MGMSG.PZ_GET_OUTPUTLUT], 1)
|
||||||
(index, output) = st.unpack("<Hh", get_msg.data[2:])
|
(index, output) = st.unpack("<Hh", get_msg.data[2:])
|
||||||
return index, output*self.voltage_limit*V/32767
|
return index, output*self.voltage_limit/32767
|
||||||
|
|
||||||
def set_output_lut_parameters(self, mode, cycle_length, num_cycles,
|
def set_output_lut_parameters(self, mode, cycle_length, num_cycles,
|
||||||
delay_time, precycle_rest, postcycle_rest):
|
delay_time, precycle_rest, postcycle_rest):
|
||||||
|
@ -626,7 +623,7 @@ class Tpz(Tcube):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
get_msg = self.send_request(MGMSG.PZ_REQ_OUTPUTLUTPARAMS,
|
get_msg = self.send_request(MGMSG.PZ_REQ_OUTPUTLUTPARAMS,
|
||||||
MGMSG.PZ_GET_OUTPUTLUTPARAMS, 1)
|
[MGMSG.PZ_GET_OUTPUTLUTPARAMS], 1)
|
||||||
return st.unpack("<HHLLLL", get_msg.data[2:22])
|
return st.unpack("<HHLLLL", get_msg.data[2:22])
|
||||||
|
|
||||||
def start_lut_output(self):
|
def start_lut_output(self):
|
||||||
|
@ -669,7 +666,7 @@ class Tpz(Tcube):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
get_msg = self.send_request(MGMSG.PZ_REQ_TPZ_DISPSETTINGS,
|
get_msg = self.send_request(MGMSG.PZ_REQ_TPZ_DISPSETTINGS,
|
||||||
MGMSG.PZ_GET_TPZ_DISPSETTINGS, 1)
|
[MGMSG.PZ_GET_TPZ_DISPSETTINGS], 1)
|
||||||
return st.unpack("<H", get_msg.data)[0]
|
return st.unpack("<H", get_msg.data)[0]
|
||||||
|
|
||||||
def set_tpz_io_settings(self, voltage_limit, hub_analog_input):
|
def set_tpz_io_settings(self, voltage_limit, hub_analog_input):
|
||||||
|
@ -684,9 +681,6 @@ class Tpz(Tcube):
|
||||||
100 V limit.
|
100 V limit.
|
||||||
|
|
||||||
150 V limit.
|
150 V limit.
|
||||||
|
|
||||||
You can either provide this parameter as an integer or as a
|
|
||||||
:class:`artiq.language.units` Volt quantity (e.g. 75*V).
|
|
||||||
:param hub_analog_input: When the T-Cube piezo driver unit is used in
|
:param hub_analog_input: When the T-Cube piezo driver unit is used in
|
||||||
conjunction with the T-Cube Strain Gauge Reader (TSG001) on the
|
conjunction with the T-Cube Strain Gauge Reader (TSG001) on the
|
||||||
T-Cube Controller Hub (TCH001), a feedback signal can be passed
|
T-Cube Controller Hub (TCH001), a feedback signal can be passed
|
||||||
|
@ -706,7 +700,7 @@ class Tpz(Tcube):
|
||||||
connectors.
|
connectors.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
self.voltage_limit = strip_unit(voltage_limit, "V")
|
self.voltage_limit = voltage_limit
|
||||||
|
|
||||||
if self.voltage_limit == 75:
|
if self.voltage_limit == 75:
|
||||||
voltage_limit = 1
|
voltage_limit = 1
|
||||||
|
@ -727,21 +721,21 @@ class Tpz(Tcube):
|
||||||
Hub analog input. Refer to :py:meth:`set_tpz_io_settings()
|
Hub analog input. Refer to :py:meth:`set_tpz_io_settings()
|
||||||
<artiq.devices.thorlabs.driver.Tpz.set_tpz_io_settings>` for the
|
<artiq.devices.thorlabs.driver.Tpz.set_tpz_io_settings>` for the
|
||||||
meaning of those parameters.
|
meaning of those parameters.
|
||||||
:rtype: a 2-element tuple (Quantity, int)
|
:rtype: a 2-element tuple (int, int)
|
||||||
"""
|
"""
|
||||||
|
|
||||||
get_msg = self.send_request(MGMSG.PZ_REQ_TPZ_IOSETTINGS,
|
get_msg = self.send_request(MGMSG.PZ_REQ_TPZ_IOSETTINGS,
|
||||||
MGMSG.PZ_GET_TPZ_IOSETTINGS, 1)
|
[MGMSG.PZ_GET_TPZ_IOSETTINGS], 1)
|
||||||
voltage_limit, hub_analog_input = st.unpack("<HH", get_msg.data[2:6])
|
voltage_limit, hub_analog_input = st.unpack("<HH", get_msg.data[2:6])
|
||||||
if voltage_limit == 1:
|
if voltage_limit == 1:
|
||||||
voltage_limit = 75*V
|
voltage_limit = 75
|
||||||
elif voltage_limit == 2:
|
elif voltage_limit == 2:
|
||||||
voltage_limit = 100*V
|
voltage_limit = 100
|
||||||
elif voltage_limit == 3:
|
elif voltage_limit == 3:
|
||||||
voltage_limit = 150*V
|
voltage_limit = 150
|
||||||
else:
|
else:
|
||||||
raise ValueError("Voltage limit should be in range [1; 3]")
|
raise ValueError("Voltage limit should be in range [1; 3]")
|
||||||
self.voltage_limit = voltage_limit.amount
|
self.voltage_limit = voltage_limit
|
||||||
return voltage_limit, hub_analog_input
|
return voltage_limit, hub_analog_input
|
||||||
|
|
||||||
|
|
||||||
|
@ -763,8 +757,6 @@ class Tdc(Tcube):
|
||||||
raise MsgError("Hardware error {}: {}"
|
raise MsgError("Hardware error {}: {}"
|
||||||
.format(code,
|
.format(code,
|
||||||
data[4:].decode(encoding="ascii")))
|
data[4:].decode(encoding="ascii")))
|
||||||
elif msg_id == MGMSG.MOT_MOVE_HOMED:
|
|
||||||
pass
|
|
||||||
elif (msg_id == MGMSG.MOT_MOVE_COMPLETED or
|
elif (msg_id == MGMSG.MOT_MOVE_COMPLETED or
|
||||||
msg_id == MGMSG.MOT_MOVE_STOPPED or
|
msg_id == MGMSG.MOT_MOVE_STOPPED or
|
||||||
msg_id == MGMSG.MOT_GET_DCSTATUSUPDATE):
|
msg_id == MGMSG.MOT_GET_DCSTATUSUPDATE):
|
||||||
|
@ -777,6 +769,10 @@ class Tdc(Tcube):
|
||||||
(self.position, self.velocity, r, self.status) = st.unpack(
|
(self.position, self.velocity, r, self.status) = st.unpack(
|
||||||
"<LHHL", data[2:])
|
"<LHHL", data[2:])
|
||||||
|
|
||||||
|
def is_moving(self):
|
||||||
|
status_bits = self.get_status_bits()
|
||||||
|
return (status_bits & 0x2F0) != 0
|
||||||
|
|
||||||
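is_moving() is what the reworked move_stop() consults before deciding whether to wait for a stop reply; the same check can be reused for a simple polling wait, sketched below, where tdc stands for a connected Tdc instance and the interval and timeout values are arbitrary.

import time

def wait_until_stopped(tdc, poll_interval=0.1, timeout=10.0):
    # Poll the status bits until the controller reports no motion,
    # or give up after `timeout` seconds.
    deadline = time.monotonic() + timeout
    while tdc.is_moving():
        if time.monotonic() > deadline:
            raise TimeoutError("motor still moving after {} s".format(timeout))
        time.sleep(poll_interval)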
def set_pot_parameters(self, zero_wnd, vel1, wnd1, vel2, wnd2, vel3,
|
def set_pot_parameters(self, zero_wnd, vel1, wnd1, vel2, wnd2, vel3,
|
||||||
wnd3, vel4):
|
wnd3, vel4):
|
||||||
"""Set pot parameters.
|
"""Set pot parameters.
|
||||||
|
@ -811,12 +807,12 @@ class Tdc(Tcube):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
get_msg = self.send_request(MGMSG.MOT_REQ_POTPARAMS,
|
get_msg = self.send_request(MGMSG.MOT_REQ_POTPARAMS,
|
||||||
MGMSG.MOT_GET_POTPARAMS, 1)
|
[MGMSG.MOT_GET_POTPARAMS], 1)
|
||||||
return st.unpack("<HLHLHLHL", get_msg.data[2:])
|
return st.unpack("<HLHLHLHL", get_msg.data[2:])
|
||||||
|
|
||||||
def hub_get_bay_used(self):
|
def hub_get_bay_used(self):
|
||||||
get_msg = self.send_request(MGMSG.HUB_REQ_BAYUSED,
|
get_msg = self.send_request(MGMSG.HUB_REQ_BAYUSED,
|
||||||
MGMSG.HUB_GET_BAYUSED)
|
[MGMSG.HUB_GET_BAYUSED])
|
||||||
return get_msg.param1
|
return get_msg.param1
|
||||||
|
|
||||||
def set_position_counter(self, position):
|
def set_position_counter(self, position):
|
||||||
|
@ -841,7 +837,7 @@ class Tdc(Tcube):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
get_msg = self.send_request(MGMSG.MOT_REQ_POSCOUNTER,
|
get_msg = self.send_request(MGMSG.MOT_REQ_POSCOUNTER,
|
||||||
MGMSG.MOT_GET_POSCOUNTER, 1)
|
[MGMSG.MOT_GET_POSCOUNTER], 1)
|
||||||
return st.unpack("<l", get_msg.data[2:])[0]
|
return st.unpack("<l", get_msg.data[2:])[0]
|
||||||
|
|
||||||
def set_encoder_counter(self, encoder_count):
|
def set_encoder_counter(self, encoder_count):
|
||||||
|
@ -865,7 +861,7 @@ class Tdc(Tcube):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
get_msg = self.send_request(MGMSG.MOT_REQ_ENCCOUNTER,
|
get_msg = self.send_request(MGMSG.MOT_REQ_ENCCOUNTER,
|
||||||
MGMSG.MOT_GET_ENCCOUNTER, 1)
|
[MGMSG.MOT_GET_ENCCOUNTER], 1)
|
||||||
return st.unpack("<l", get_msg.data[2:])[0]
|
return st.unpack("<l", get_msg.data[2:])[0]
|
||||||
|
|
||||||
def set_velocity_parameters(self, acceleration, max_velocity):
|
def set_velocity_parameters(self, acceleration, max_velocity):
|
||||||
|
@ -886,7 +882,7 @@ class Tdc(Tcube):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
get_msg = self.send_request(MGMSG.MOT_REQ_VELPARAMS,
|
get_msg = self.send_request(MGMSG.MOT_REQ_VELPARAMS,
|
||||||
MGMSG.MOT_GET_VELPARAMS, 1)
|
[MGMSG.MOT_GET_VELPARAMS], 1)
|
||||||
return st.unpack("<LL", get_msg.data[6:])
|
return st.unpack("<LL", get_msg.data[6:])
|
||||||
|
|
||||||
def set_jog_parameters(self, mode, step_size, acceleration,
|
def set_jog_parameters(self, mode, step_size, acceleration,
|
||||||
|
@ -915,7 +911,7 @@ class Tdc(Tcube):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
get_msg = self.send_request(MGMSG.MOT_REQ_JOGPARAMS,
|
get_msg = self.send_request(MGMSG.MOT_REQ_JOGPARAMS,
|
||||||
MGMSG.MOT_GET_JOGPARAMS, 1)
|
[MGMSG.MOT_GET_JOGPARAMS], 1)
|
||||||
(jog_mode, step_size, _, acceleration, max_velocity,
|
(jog_mode, step_size, _, acceleration, max_velocity,
|
||||||
stop_mode) = st.unpack("<HLLLLH", get_msg.data[2:])
|
stop_mode) = st.unpack("<HLLLLH", get_msg.data[2:])
|
||||||
return jog_mode, step_size, acceleration, max_velocity, stop_mode
|
return jog_mode, step_size, acceleration, max_velocity, stop_mode
|
||||||
|
@ -938,7 +934,7 @@ class Tdc(Tcube):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
get_msg = self.send_request(MGMSG.MOT_REQ_GENMOVEPARAMS,
|
get_msg = self.send_request(MGMSG.MOT_REQ_GENMOVEPARAMS,
|
||||||
MGMSG.MOT_GET_GENMOVEPARAMS, 1)
|
[MGMSG.MOT_GET_GENMOVEPARAMS], 1)
|
||||||
return st.unpack("<l", get_msg.data[2:])[0]
|
return st.unpack("<l", get_msg.data[2:])[0]
|
||||||
|
|
||||||
def set_move_relative_parameters(self, relative_distance):
|
def set_move_relative_parameters(self, relative_distance):
|
||||||
|
@ -960,7 +956,7 @@ class Tdc(Tcube):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
get_msg = self.send_request(MGMSG.MOT_REQ_MOVERELPARAMS,
|
get_msg = self.send_request(MGMSG.MOT_REQ_MOVERELPARAMS,
|
||||||
MGMSG.MOT_GET_MOVERELPARAMS, 1)
|
[MGMSG.MOT_GET_MOVERELPARAMS], 1)
|
||||||
return st.unpack("<l", get_msg.data[2:])[0]
|
return st.unpack("<l", get_msg.data[2:])[0]
|
||||||
|
|
||||||
def set_move_absolute_parameters(self, absolute_position):
|
def set_move_absolute_parameters(self, absolute_position):
|
||||||
|
@ -982,7 +978,7 @@ class Tdc(Tcube):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
get_msg = self.send_request(MGMSG.MOT_REQ_MOVEABSPARAMS,
|
get_msg = self.send_request(MGMSG.MOT_REQ_MOVEABSPARAMS,
|
||||||
MGMSG.MOT_GET_MOVEABSPARAMS, 1)
|
[MGMSG.MOT_GET_MOVEABSPARAMS], 1)
|
||||||
return st.unpack("<l", get_msg.data[2:])[0]
|
return st.unpack("<l", get_msg.data[2:])[0]
|
||||||
|
|
||||||
def set_home_parameters(self, home_velocity):
|
def set_home_parameters(self, home_velocity):
|
||||||
|
@ -1002,17 +998,17 @@ class Tdc(Tcube):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
get_msg = self.send_request(MGMSG.MOT_REQ_HOMEPARAMS,
|
get_msg = self.send_request(MGMSG.MOT_REQ_HOMEPARAMS,
|
||||||
MGMSG.MOT_GET_HOMEPARAMS, 1)
|
[MGMSG.MOT_GET_HOMEPARAMS], 1)
|
||||||
return st.unpack("<L", get_msg.data[6:10])[0]
|
return st.unpack("<L", get_msg.data[6:10])[0]
|
||||||
|
|
||||||
def move_home(self):
|
def move_home(self):
|
||||||
"""Start a home move sequence.
|
"""Start a home move sequence.
|
||||||
|
|
||||||
This call blocks until the device is homed.
|
This call blocks until the device is homed or the move is stopped.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
self.send_request(MGMSG.MOT_MOVE_HOME,
|
self.send_request(MGMSG.MOT_MOVE_HOME,
|
||||||
MGMSG.MOT_MOVE_HOMED, 1)
|
[MGMSG.MOT_MOVE_HOMED, MGMSG.MOT_MOVE_STOPPED], 1)
|
||||||
|
|
||||||
def set_limit_switch_parameters(self, cw_hw_limit, ccw_hw_limit):
|
def set_limit_switch_parameters(self, cw_hw_limit, ccw_hw_limit):
|
||||||
"""Set the limit switch parameters.
|
"""Set the limit switch parameters.
|
||||||
|
@ -1057,7 +1053,7 @@ class Tdc(Tcube):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
get_msg = self.send_request(MGMSG.MOT_REQ_LIMSWITCHPARAMS,
|
get_msg = self.send_request(MGMSG.MOT_REQ_LIMSWITCHPARAMS,
|
||||||
MGMSG.MOT_GET_LIMSWITCHPARAMS, 1)
|
[MGMSG.MOT_GET_LIMSWITCHPARAMS], 1)
|
||||||
return st.unpack("<HH", get_msg.data[2:6])
|
return st.unpack("<HH", get_msg.data[2:6])
|
||||||
|
|
||||||
def move_relative_memory(self):
|
def move_relative_memory(self):
|
||||||
|
@ -1069,7 +1065,8 @@ class Tdc(Tcube):
|
||||||
command.
|
command.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
self.send(Message(MGMSG.MOT_MOVE_RELATIVE, param1=1))
|
self.send_request(MGMSG.MOT_MOVE_RELATIVE,
|
||||||
|
[MGMSG.MOT_MOVE_COMPLETED, MGMSG.MOT_MOVE_STOPPED], 1)
|
||||||
|
|
||||||
def move_relative(self, relative_distance):
|
def move_relative(self, relative_distance):
|
||||||
"""Start a relative move
|
"""Start a relative move
|
||||||
|
@ -1079,7 +1076,9 @@ class Tdc(Tcube):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
payload = st.pack("<Hl", 1, relative_distance)
|
payload = st.pack("<Hl", 1, relative_distance)
|
||||||
self.send(Message(MGMSG.MOT_MOVE_RELATIVE, data=payload))
|
self.send_request(MGMSG.MOT_MOVE_RELATIVE,
|
||||||
|
[MGMSG.MOT_MOVE_COMPLETED, MGMSG.MOT_MOVE_STOPPED],
|
||||||
|
data=payload)
|
||||||
|
|
||||||
def move_absolute_memory(self):
|
def move_absolute_memory(self):
|
||||||
"""Start an absolute move of distance in the controller's memory.
|
"""Start an absolute move of distance in the controller's memory.
|
||||||
|
@ -1090,7 +1089,9 @@ class Tdc(Tcube):
|
||||||
command.
|
command.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
self.send(Message(MGMSG.MOT_MOVE_ABSOLUTE, param1=1))
|
self.send_request(MGMSG.MOT_MOVE_ABSOLUTE,
|
||||||
|
[MGMSG.MOT_MOVE_COMPLETED, MGMSG.MOT_MOVE_STOPPED],
|
||||||
|
param1=1)
|
||||||
|
|
||||||
def move_absolute(self, absolute_distance):
|
def move_absolute(self, absolute_distance):
|
||||||
"""Start an absolute move.
|
"""Start an absolute move.
|
||||||
|
@ -1101,21 +1102,19 @@ class Tdc(Tcube):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
payload = st.pack("<Hl", 1, absolute_distance)
|
payload = st.pack("<Hl", 1, absolute_distance)
|
||||||
self.send(Message(MGMSG.MOT_MOVE_ABSOLUTE, data=payload))
|
self.send_request(MGMSG.MOT_MOVE_ABSOLUTE,
|
||||||
|
[MGMSG.MOT_MOVE_COMPLETED, MGMSG.MOT_MOVE_STOPPED],
|
||||||
|
data=payload)
|
||||||
|
|
||||||
def move_jog(self, direction, async=False):
|
def move_jog(self, direction):
|
||||||
"""Start a job move.
|
"""Start a job move.
|
||||||
|
|
||||||
:param direction: The direction to jog. 1 is forward, 2 is backward.
|
:param direction: The direction to jog. 1 is forward, 2 is backward.
|
||||||
:param async: If True then the command does not wait for the move to
|
|
||||||
finish. If False the command only returns when move is finished.
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
if async:
|
self.send_request(MGMSG.MOT_MOVE_JOG,
|
||||||
self.send_request(MGMSG.MOT_MOVE_JOG,
|
[MGMSG.MOT_MOVE_COMPLETED, MGMSG.MOT_MOVE_STOPPED],
|
||||||
MGMSG.MOT_MOVE_COMPLETED, 1, direction)
|
param1=1, param2=direction)
|
||||||
else:
|
|
||||||
self.send(Message(MGMSG.MOT_MOVE_JOG, param1=1, param2=direction))
|
|
||||||
|
|
||||||
def move_velocity(self, direction):
|
def move_velocity(self, direction):
|
||||||
"""Start a move.
|
"""Start a move.
|
||||||
|
@ -1134,7 +1133,7 @@ class Tdc(Tcube):
|
||||||
|
|
||||||
self.send(Message(MGMSG.MOT_MOVE_VELOCITY, param1=1, param2=direction))
|
self.send(Message(MGMSG.MOT_MOVE_VELOCITY, param1=1, param2=direction))
|
||||||
|
|
||||||
def move_stop(self, stop_mode, async=False):
|
def move_stop(self, stop_mode):
|
||||||
"""Stop any type of motor move.
|
"""Stop any type of motor move.
|
||||||
|
|
||||||
Stops any of these motor moves: relative, absolute, homing or move at
|
Stops any of these motor moves: relative, absolute, homing or move at
|
||||||
|
@ -1143,15 +1142,13 @@ class Tdc(Tcube):
|
||||||
:param stop_mode: The stop mode defines either an immediate (abrupt)
|
:param stop_mode: The stop mode defines either an immediate (abrupt)
|
||||||
or profiled stop. Set this byte to 1 to stop immediately, or to 2
|
or profiled stop. Set this byte to 1 to stop immediately, or to 2
|
||||||
to stop in a controlled (profiled) manner.
|
to stop in a controlled (profiled) manner.
|
||||||
:param async: If set to False, this method will block until motor
|
|
||||||
is really stopped. Returns immediately if set to True.
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
if async:
|
if self.is_moving():
|
||||||
self.send(Message(MGMSG.MOT_MOVE_STOP, param1=1, param2=stop_mode))
|
|
||||||
else:
|
|
||||||
self.send_request(MGMSG.MOT_MOVE_STOP,
|
self.send_request(MGMSG.MOT_MOVE_STOP,
|
||||||
MGMSG.MOT_MOVE_STOPPED, 1, stop_mode)
|
[MGMSG.MOT_MOVE_STOPPED,
|
||||||
|
MGMSG.MOT_MOVE_COMPLETED],
|
||||||
|
1, stop_mode)
|
||||||
|
|
||||||
def set_dc_pid_parameters(self, proportional, integral, differential,
|
def set_dc_pid_parameters(self, proportional, integral, differential,
|
||||||
integral_limit, filter_control=0x0F):
|
integral_limit, filter_control=0x0F):
|
||||||
|
@ -1185,7 +1182,7 @@ class Tdc(Tcube):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
get_msg = self.send_request(MGMSG.MOT_REQ_DCPIDPARAMS,
|
get_msg = self.send_request(MGMSG.MOT_REQ_DCPIDPARAMS,
|
||||||
MGMSG.MOT_GET_DCPIDPARAMS, 1)
|
[MGMSG.MOT_GET_DCPIDPARAMS], 1)
|
||||||
return st.unpack("<LLLLH", get_msg.data[2:])
|
return st.unpack("<LLLLH", get_msg.data[2:])
|
||||||
|
|
||||||
def set_av_modes(self, mode_bits):
|
def set_av_modes(self, mode_bits):
|
||||||
|
@ -1212,7 +1209,7 @@ class Tdc(Tcube):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
get_msg = self.send_request(MGMSG.MOT_REQ_AVMODES,
|
get_msg = self.send_request(MGMSG.MOT_REQ_AVMODES,
|
||||||
MGMSG.MOT_GET_AVMODES, 1)
|
[MGMSG.MOT_GET_AVMODES], 1)
|
||||||
return st.unpack("<H", get_msg.data[2:])[0]
|
return st.unpack("<H", get_msg.data[2:])[0]
|
||||||
|
|
||||||
def set_button_parameters(self, mode, position1, position2):
|
def set_button_parameters(self, mode, position1, position2):
|
||||||
|
@ -1250,7 +1247,7 @@ class Tdc(Tcube):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
get_msg = self.send_request(MGMSG.MOT_REQ_BUTTONPARAMS,
|
get_msg = self.send_request(MGMSG.MOT_REQ_BUTTONPARAMS,
|
||||||
MGMSG.MOT_GET_BUTTONPARAMS, 1)
|
[MGMSG.MOT_GET_BUTTONPARAMS], 1)
|
||||||
return st.unpack("<Hll", get_msg.data[2:12])
|
return st.unpack("<Hll", get_msg.data[2:12])
|
||||||
|
|
||||||
def set_eeprom_parameters(self, msg_id):
|
def set_eeprom_parameters(self, msg_id):
|
||||||
|
@ -1274,7 +1271,7 @@ class Tdc(Tcube):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
get_msg = self.send_request(MGMSG.MOT_REQ_DCSTATUSUPDATE,
|
get_msg = self.send_request(MGMSG.MOT_REQ_DCSTATUSUPDATE,
|
||||||
MGMSG.MOT_GET_DCSTATUSUPDATE, 1)
|
[MGMSG.MOT_GET_DCSTATUSUPDATE], 1)
|
||||||
pos, vel, _, stat = st.unpack("<LHHL", get_msg.data[2:])
|
pos, vel, _, stat = st.unpack("<LHHL", get_msg.data[2:])
|
||||||
return pos, vel, stat
|
return pos, vel, stat
|
||||||
|
|
||||||
|
@ -1286,7 +1283,7 @@ class Tdc(Tcube):
|
||||||
"""
|
"""
|
||||||
|
|
||||||
get_msg = self.send_request(MGMSG.MOT_REQ_STATUSBITS,
|
get_msg = self.send_request(MGMSG.MOT_REQ_STATUSBITS,
|
||||||
MGMSG.MOT_GET_STATUSBITS, 1)
|
[MGMSG.MOT_GET_STATUSBITS], 1)
|
||||||
return st.unpack("<L", get_msg.data[2:])[0]
|
return st.unpack("<L", get_msg.data[2:])[0]
|
||||||
|
|
||||||
def suspend_end_of_move_messages(self):
|
def suspend_end_of_move_messages(self):
|
||||||
|
@ -1320,6 +1317,9 @@ class TpzSim:
|
||||||
self.voltage_limit = 150
|
self.voltage_limit = 150
|
||||||
self.hub_analog_input = 1
|
self.hub_analog_input = 1
|
||||||
|
|
||||||
|
def close(self):
|
||||||
|
pass
|
||||||
|
|
||||||
def module_identify(self):
|
def module_identify(self):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
@ -1392,17 +1392,19 @@ class TpzSim:
|
||||||
return self.intensity
|
return self.intensity
|
||||||
|
|
||||||
def set_tpz_io_settings(self, voltage_limit, hub_analog_input):
|
def set_tpz_io_settings(self, voltage_limit, hub_analog_input):
|
||||||
self.voltage_limit = strip_unit(voltage_limit, "V")
|
if voltage_limit not in [75, 100, 150]:
|
||||||
|
|
||||||
if self.voltage_limit not in [75, 100, 150]:
|
|
||||||
raise ValueError("voltage_limit must be 75 V, 100 V or 150 V")
|
raise ValueError("voltage_limit must be 75 V, 100 V or 150 V")
|
||||||
|
self.voltage_limit = voltage_limit
|
||||||
self.hub_analog_input = hub_analog_input
|
self.hub_analog_input = hub_analog_input
|
||||||
|
|
||||||
def get_tpz_io_settings(self):
|
def get_tpz_io_settings(self):
|
||||||
return self.voltage_limit*V, self.hub_analog_input
|
return self.voltage_limit, self.hub_analog_input
|
||||||
|
|
||||||
|
|
||||||
class TdcSim:
|
class TdcSim:
|
||||||
|
def close(self):
|
||||||
|
pass
|
||||||
|
|
||||||
def module_identify(self):
|
def module_identify(self):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
|
@ -12,7 +12,6 @@ from prettytable import PrettyTable
|
||||||
from artiq.protocols.pc_rpc import Client
|
from artiq.protocols.pc_rpc import Client
|
||||||
from artiq.protocols.sync_struct import Subscriber
|
from artiq.protocols.sync_struct import Subscriber
|
||||||
from artiq.protocols import pyon
|
from artiq.protocols import pyon
|
||||||
from artiq.tools import format_arguments
|
|
||||||
|
|
||||||
|
|
||||||
def clear_screen():
|
def clear_screen():
|
||||||
|
@ -32,16 +31,19 @@ def get_argparser():
|
||||||
subparsers.required = True
|
subparsers.required = True
|
||||||
|
|
||||||
parser_add = subparsers.add_parser("submit", help="submit an experiment")
|
parser_add = subparsers.add_parser("submit", help="submit an experiment")
|
||||||
parser_add.add_argument("-t", "--timed", default=None, type=str,
|
|
||||||
help="set a due date for the experiment")
|
|
||||||
parser_add.add_argument("-p", "--pipeline", default="main", type=str,
|
parser_add.add_argument("-p", "--pipeline", default="main", type=str,
|
||||||
help="pipeline to run the experiment in "
|
help="pipeline to run the experiment in "
|
||||||
"(default: %(default)s)")
|
"(default: %(default)s)")
|
||||||
parser_add.add_argument("-P", "--priority", default=0, type=int,
|
parser_add.add_argument("-P", "--priority", default=0, type=int,
|
||||||
help="priority (higher value means sooner "
|
help="priority (higher value means sooner "
|
||||||
"scheduling, default: %(default)s)")
|
"scheduling, default: %(default)s)")
|
||||||
parser_add.add_argument("-e", "--experiment", default=None,
|
parser_add.add_argument("-t", "--timed", default=None, type=str,
|
||||||
help="experiment to run")
|
help="set a due date for the experiment")
|
||||||
|
parser_add.add_argument("-f", "--flush", default=False, action="store_true",
|
||||||
|
help="flush the pipeline before preparing "
|
||||||
|
"the experiment")
|
||||||
|
parser_add.add_argument("-c", "--class-name", default=None,
|
||||||
|
help="name of the class to run")
|
||||||
parser_add.add_argument("file",
|
parser_add.add_argument("file",
|
||||||
help="file containing the experiment to run")
|
help="file containing the experiment to run")
|
||||||
parser_add.add_argument("arguments", nargs="*",
|
parser_add.add_argument("arguments", nargs="*",
|
||||||
|
@ -79,6 +81,9 @@ def get_argparser():
|
||||||
"what",
|
"what",
|
||||||
help="select object to show: schedule/devices/parameters")
|
help="select object to show: schedule/devices/parameters")
|
||||||
|
|
||||||
|
parser_scan_repository = subparsers.add_parser(
|
||||||
|
"scan-repository", help="rescan repository")
|
||||||
|
|
||||||
return parser
|
return parser
|
||||||
|
|
||||||
|
|
||||||
|
@ -99,14 +104,15 @@ def _action_submit(remote, args):
|
||||||
|
|
||||||
expid = {
|
expid = {
|
||||||
"file": args.file,
|
"file": args.file,
|
||||||
"experiment": args.experiment,
|
"class_name": args.class_name,
|
||||||
"arguments": arguments,
|
"arguments": arguments,
|
||||||
}
|
}
|
||||||
if args.timed is None:
|
if args.timed is None:
|
||||||
due_date = None
|
due_date = None
|
||||||
else:
|
else:
|
||||||
due_date = time.mktime(parse_date(args.timed).timetuple())
|
due_date = time.mktime(parse_date(args.timed).timetuple())
|
||||||
rid = remote.submit(args.pipeline, expid, args.priority, due_date)
|
rid = remote.submit(args.pipeline, expid,
|
||||||
|
args.priority, due_date, args.flush)
|
||||||
print("RID: {}".format(rid))
|
print("RID: {}".format(rid))
|
||||||
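With the renamed expid field and the extra flush argument, a submission from a script talking to the same RPC target would look roughly like this; the file path, class name and arguments are placeholders, and remote is assumed to be a Client connected to the "master_schedule" target as set up in main().

expid = {
    "file": "repository/flopping_f_simulation.py",  # placeholder path
    "class_name": "FloppingF",                      # placeholder class
    "arguments": {"npoints": 100},                  # placeholder arguments
}
rid = remote.submit("main", expid, 0, None, False)  # pipeline, expid, priority,
                                                    # due_date, flush
print("RID: {}".format(rid))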
|
|
||||||
|
|
||||||
|
@ -130,15 +136,19 @@ def _action_del_parameter(remote, args):
|
||||||
remote.delete(args.name)
|
remote.delete(args.name)
|
||||||
|
|
||||||
|
|
||||||
|
def _action_scan_repository(remote, args):
|
||||||
|
remote.scan_async()
|
||||||
|
|
||||||
|
|
||||||
def _show_schedule(schedule):
|
def _show_schedule(schedule):
|
||||||
clear_screen()
|
clear_screen()
|
||||||
if schedule:
|
if schedule:
|
||||||
l = sorted(schedule.items(),
|
l = sorted(schedule.items(),
|
||||||
key=lambda x: (x[1]["due_date"] or 0,
|
key=lambda x: (-x[1]["priority"],
|
||||||
-x[1]["priority"],
|
x[1]["due_date"] or 0,
|
||||||
x[0]))
|
x[0]))
|
||||||
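The reordered sort key puts priority ahead of the due date; a quick check with dummy entries shows the effect (RIDs and values are made up):

schedule = {
    0: {"priority": 0, "due_date": 100.0},
    1: {"priority": 5, "due_date": None},
    2: {"priority": 5, "due_date": 50.0},
}
l = sorted(schedule.items(),
           key=lambda x: (-x[1]["priority"],
                          x[1]["due_date"] or 0,
                          x[0]))
print([rid for rid, _ in l])  # [1, 2, 0]: higher priority first, then due date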
table = PrettyTable(["RID", "Pipeline", " Status ", "Prio",
|
table = PrettyTable(["RID", "Pipeline", " Status ", "Prio",
|
||||||
"Due date", "File", "Experiment", "Arguments"])
|
"Due date", "File", "Class name"])
|
||||||
for rid, v in l:
|
for rid, v in l:
|
||||||
row = [rid, v["pipeline"], v["status"], v["priority"]]
|
row = [rid, v["pipeline"], v["status"], v["priority"]]
|
||||||
if v["due_date"] is None:
|
if v["due_date"] is None:
|
||||||
|
@ -147,11 +157,10 @@ def _show_schedule(schedule):
|
||||||
row.append(time.strftime("%m/%d %H:%M:%S",
|
row.append(time.strftime("%m/%d %H:%M:%S",
|
||||||
time.localtime(v["due_date"])))
|
time.localtime(v["due_date"])))
|
||||||
row.append(v["expid"]["file"])
|
row.append(v["expid"]["file"])
|
||||||
if v["expid"]["experiment"] is None:
|
if v["expid"]["class_name"] is None:
|
||||||
row.append("")
|
row.append("")
|
||||||
else:
|
else:
|
||||||
row.append(v["expid"]["experiment"])
|
row.append(v["expid"]["class_name"])
|
||||||
row.append(format_arguments(v["expid"]["arguments"]))
|
|
||||||
table.add_row(row)
|
table.add_row(row)
|
||||||
print(table)
|
print(table)
|
||||||
else:
|
else:
|
||||||
|
@ -224,6 +233,7 @@ def main():
|
||||||
"del_device": "master_ddb",
|
"del_device": "master_ddb",
|
||||||
"set_parameter": "master_pdb",
|
"set_parameter": "master_pdb",
|
||||||
"del_parameter": "master_pdb",
|
"del_parameter": "master_pdb",
|
||||||
|
"scan_repository": "master_repository"
|
||||||
}[action]
|
}[action]
|
||||||
remote = Client(args.server, port, target_name)
|
remote = Client(args.server, port, target_name)
|
||||||
try:
|
try:
|
||||||
@ -4,7 +4,7 @@ import logging
|
||||||
import argparse
|
import argparse
|
||||||
|
|
||||||
from artiq.protocols.file_db import FlatFileDB
|
from artiq.protocols.file_db import FlatFileDB
|
||||||
from artiq.master.worker_db import DBHub
|
from artiq.master.worker_db import DeviceManager
|
||||||
from artiq.tools import *
|
from artiq.tools import *
|
||||||
|
|
||||||
|
|
||||||
|
@ -36,15 +36,14 @@ def main():
|
||||||
args = get_argparser().parse_args()
|
args = get_argparser().parse_args()
|
||||||
init_logger(args)
|
init_logger(args)
|
||||||
|
|
||||||
ddb = FlatFileDB(args.ddb)
|
dmgr = DeviceManager(FlatFileDB(args.ddb))
|
||||||
pdb = FlatFileDB(args.pdb)
|
pdb = FlatFileDB(args.pdb)
|
||||||
dbh = DBHub(ddb, pdb, rdb=None, read_only=True)
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
module = file_import(args.file)
|
module = file_import(args.file)
|
||||||
exp = get_experiment(module, args.experiment)
|
exp = get_experiment(module, args.experiment)
|
||||||
arguments = parse_arguments(args.arguments)
|
arguments = parse_arguments(args.arguments)
|
||||||
exp_inst = exp(dbh, **arguments)
|
exp_inst = exp(dmgr, pdb, **arguments)
|
||||||
|
|
||||||
if (not hasattr(exp.run, "k_function_info")
|
if (not hasattr(exp.run, "k_function_info")
|
||||||
or not exp.run.k_function_info):
|
or not exp.run.k_function_info):
|
||||||
|
@ -56,7 +55,7 @@ def main():
|
||||||
[exp_inst], {},
|
[exp_inst], {},
|
||||||
with_attr_writeback=False)
|
with_attr_writeback=False)
|
||||||
finally:
|
finally:
|
||||||
dbh.close_devices()
|
dmgr.close_devices()
|
||||||
|
|
||||||
if rpc_map:
|
if rpc_map:
|
||||||
raise ValueError("Experiment must not use RPC")
|
raise ValueError("Experiment must not use RPC")
|
||||||
@ -0,0 +1,72 @@
|
||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
|
||||||
|
from artiq.master.worker_db import DeviceManager
|
||||||
|
from artiq.protocols.file_db import FlatFileDB
|
||||||
|
|
||||||
|
|
||||||
|
def to_bytes(string):
|
||||||
|
return bytes(string, encoding="ascii")
|
||||||
|
|
||||||
|
|
||||||
|
def get_argparser():
|
||||||
|
parser = argparse.ArgumentParser(description="ARTIQ core device config "
|
||||||
|
"remote access")
|
||||||
|
subparsers = parser.add_subparsers(dest="action")
|
||||||
|
subparsers.required = True
|
||||||
|
p_read = subparsers.add_parser("read",
|
||||||
|
help="read key from core device config")
|
||||||
|
p_read.add_argument("key", type=to_bytes,
|
||||||
|
help="key to be read from core device config")
|
||||||
|
p_write = subparsers.add_parser("write",
|
||||||
|
help="write key-value records to core "
|
||||||
|
"device config")
|
||||||
|
p_write.add_argument("-s", "--string", nargs=2, action="append",
|
||||||
|
default=[], metavar=("KEY", "STRING"), type=to_bytes,
|
||||||
|
help="key-value records to be written to core device "
|
||||||
|
"config")
|
||||||
|
p_write.add_argument("-f", "--file", nargs=2, action="append",
|
||||||
|
type=to_bytes, default=[],
|
||||||
|
metavar=("KEY", "FILENAME"),
|
||||||
|
help="key and file whose content to be written to "
|
||||||
|
"core device config")
|
||||||
|
subparsers.add_parser("erase", help="erase core device config")
|
||||||
|
p_delete = subparsers.add_parser("delete",
|
||||||
|
help="delete key from core device config")
|
||||||
|
p_delete.add_argument("key", nargs=argparse.REMAINDER,
|
||||||
|
default=[], type=to_bytes,
|
||||||
|
help="key to be deleted from core device config")
|
||||||
|
parser.add_argument("--ddb", default="ddb.pyon",
|
||||||
|
help="device database file")
|
||||||
|
return parser
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
args = get_argparser().parse_args()
|
||||||
|
dmgr = DeviceManager(FlatFileDB(args.ddb))
|
||||||
|
try:
|
||||||
|
comm = dmgr.get("comm")
|
||||||
|
|
||||||
|
if args.action == "read":
|
||||||
|
value = comm.flash_storage_read(args.key)
|
||||||
|
if not value:
|
||||||
|
print("Key {} does not exist".format(args.key))
|
||||||
|
else:
|
||||||
|
print(value)
|
||||||
|
elif args.action == "erase":
|
||||||
|
comm.flash_storage_erase()
|
||||||
|
elif args.action == "delete":
|
||||||
|
for key in args.key:
|
||||||
|
comm.flash_storage_remove(key)
|
||||||
|
elif args.action == "write":
|
||||||
|
for key, value in args.string:
|
||||||
|
comm.flash_storage_write(key, value)
|
||||||
|
for key, filename in args.file:
|
||||||
|
with open(filename, "rb") as fi:
|
||||||
|
comm.flash_storage_write(key, fi.read())
|
||||||
|
finally:
|
||||||
|
dmgr.close_devices()
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
|
@@ -1,13 +1,16 @@
 #!/bin/bash
 
+# exit on error
 set -e
+# print commands
+#set -x
 
 ARTIQ_PREFIX=$(python3 -c "import artiq; print(artiq.__path__[0])")
 
 # Default is kc705
 BOARD=kc705
 
-while getopts "bBrht:d:" opt
+while getopts "bBrht:d:f:" opt
 do
     case $opt in
         b)
@@ -19,6 +22,15 @@ do
         r)
             FLASH_RUNTIME=1
             ;;
+        f)
+            if [ -f $OPTARG ]
+            then
+                FILENAME=$OPTARG
+            else
+                echo "You specified a non-existing file to flash: $OPTARG"
+                exit 1
+            fi
+            ;;
         t)
             if [ "$OPTARG" == "kc705" ]
             then
@@ -52,6 +64,7 @@ do
             echo "-r Flash ARTIQ runtime"
             echo "-h Show this help message"
             echo "-t Target (kc705, pipistrello, default is: kc705)"
+            echo "-f Flash storage image generated with artiq_mkfs"
             echo "-d Directory containing the binaries to be flashed"
             exit 1
             ;;
@@ -95,22 +108,24 @@ then
     PROXY=bscan_spi_kc705.bit
     BIOS_ADDR=0xaf0000
     RUNTIME_ADDR=0xb00000
+    FS_ADDR=0xb40000
     if [ -z "$BIN_PREFIX" ]; then BIN_PREFIX=$ARTIQ_PREFIX/binaries/kc705; fi
     search_for_proxy $PROXY
 elif [ "$BOARD" == "pipistrello" ]
 then
     UDEV_RULES=99-papilio.rules
-    BITSTREAM=artiq_pipistrello-nist_qc1-pipistrello.bin
+    BITSTREAM=artiq_pipistrello-nist_qc1-pipistrello.bit
     CABLE=papilio
     PROXY=bscan_spi_lx45_csg324.bit
     BIOS_ADDR=0x170000
     RUNTIME_ADDR=0x180000
+    FS_ADDR=0x1c0000
     if [ -z "$BIN_PREFIX" ]; then BIN_PREFIX=$ARTIQ_PREFIX/binaries/pipistrello; fi
     search_for_proxy $PROXY
 fi
 
 # Check if neither of -b|-B|-r have been used
-if [ -z "$FLASH_RUNTIME" -a -z "$FLASH_BIOS" -a -z "$FLASH_BITSTREAM" ]
+if [ -z "$FLASH_RUNTIME" -a -z "$FLASH_BIOS" -a -z "$FLASH_BITSTREAM" -a -z "$FILENAME" ]
 then
     FLASH_RUNTIME=1
     FLASH_BIOS=1
@@ -132,10 +147,16 @@ then
 fi
 set -e
 
+if [ ! -z "$FILENAME" ]
+then
+    echo "Flashing file $FILENAME at address $FS_ADDR"
+    xc3sprog -v -c $CABLE -I$PROXY_PATH/$PROXY $FILENAME:w:$FS_ADDR:BIN
+fi
+
 if [ "${FLASH_BITSTREAM}" == "1" ]
 then
     echo "Flashing FPGA bitstream..."
-    xc3sprog -v -c $CABLE -I$PROXY_PATH/$PROXY $BIN_PREFIX/$BITSTREAM:w:0x0:BIN
+    xc3sprog -v -c $CABLE -I$PROXY_PATH/$PROXY $BIN_PREFIX/$BITSTREAM:w:0x0:BIT
 fi
 
 if [ "${FLASH_BIOS}" == "1" ]
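For quick reference, the flash regions this script programs per board can be collected into a small table; the sketch below is just that summary in Python, with the addresses copied from the hunks above (the "storage" slot is the new region written when -f is given).

# Flash layout per board, copied from the diff above. FLASH_MAP is an
# illustrative summary, not part of the script.
FLASH_MAP = {
    "kc705":       {"bios": 0xaf0000, "runtime": 0xb00000, "storage": 0xb40000},
    "pipistrello": {"bios": 0x170000, "runtime": 0x180000, "storage": 0x1c0000},
}

print(hex(FLASH_MAP["pipistrello"]["storage"]))  # -> 0x1c0000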
@ -3,6 +3,7 @@
|
||||||
import argparse
|
import argparse
|
||||||
import asyncio
|
import asyncio
|
||||||
import atexit
|
import atexit
|
||||||
|
import os
|
||||||
|
|
||||||
# Quamash must be imported first so that pyqtgraph picks up the Qt binding
|
# Quamash must be imported first so that pyqtgraph picks up the Qt binding
|
||||||
# it has chosen.
|
# it has chosen.
|
||||||
|
@ -12,9 +13,15 @@ from pyqtgraph import dockarea
|
||||||
from artiq.protocols.file_db import FlatFileDB
|
from artiq.protocols.file_db import FlatFileDB
|
||||||
from artiq.protocols.pc_rpc import AsyncioClient
|
from artiq.protocols.pc_rpc import AsyncioClient
|
||||||
from artiq.gui.explorer import ExplorerDock
|
from artiq.gui.explorer import ExplorerDock
|
||||||
|
from artiq.gui.moninj import MonInj
|
||||||
|
from artiq.gui.results import ResultsDock
|
||||||
from artiq.gui.parameters import ParametersDock
|
from artiq.gui.parameters import ParametersDock
|
||||||
from artiq.gui.log import LogDock
|
|
||||||
from artiq.gui.schedule import ScheduleDock
|
from artiq.gui.schedule import ScheduleDock
|
||||||
|
from artiq.gui.log import LogDock
|
||||||
|
|
||||||
|
|
||||||
|
data_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)),
|
||||||
|
"..", "gui")
|
||||||
|
|
||||||
|
|
||||||
def get_argparser():
|
def get_argparser():
|
||||||
|
@ -34,6 +41,17 @@ def get_argparser():
|
||||||
return parser
|
return parser
|
||||||
|
|
||||||
|
|
||||||
|
class _MainWindow(QtGui.QMainWindow):
|
||||||
|
def __init__(self, app):
|
||||||
|
QtGui.QMainWindow.__init__(self)
|
||||||
|
self.setWindowIcon(QtGui.QIcon(os.path.join(data_dir, "icon.png")))
|
||||||
|
self.resize(1400, 800)
|
||||||
|
self.setWindowTitle("ARTIQ")
|
||||||
|
self.exit_request = asyncio.Event()
|
||||||
|
|
||||||
|
def closeEvent(self, *args):
|
||||||
|
self.exit_request.set()
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
args = get_argparser().parse_args()
|
args = get_argparser().parse_args()
|
||||||
|
|
||||||
|
@ -49,38 +67,53 @@ def main():
|
||||||
args.server, args.port_control, "master_schedule"))
|
args.server, args.port_control, "master_schedule"))
|
||||||
atexit.register(lambda: schedule_ctl.close_rpc())
|
atexit.register(lambda: schedule_ctl.close_rpc())
|
||||||
|
|
||||||
win = QtGui.QMainWindow()
|
win = _MainWindow(app)
|
||||||
area = dockarea.DockArea()
|
area = dockarea.DockArea()
|
||||||
win.setCentralWidget(area)
|
win.setCentralWidget(area)
|
||||||
status_bar = QtGui.QStatusBar()
|
status_bar = QtGui.QStatusBar()
|
||||||
status_bar.showMessage("Connected to {}".format(args.server))
|
status_bar.showMessage("Connected to {}".format(args.server))
|
||||||
win.setStatusBar(status_bar)
|
win.setStatusBar(status_bar)
|
||||||
win.resize(1400, 800)
|
|
||||||
win.setWindowTitle("ARTIQ")
|
|
||||||
|
|
||||||
d_explorer = ExplorerDock(status_bar, schedule_ctl)
|
d_explorer = ExplorerDock(win, status_bar, schedule_ctl)
|
||||||
area.addDock(d_explorer, "top")
|
|
||||||
loop.run_until_complete(d_explorer.sub_connect(
|
loop.run_until_complete(d_explorer.sub_connect(
|
||||||
args.server, args.port_notify))
|
args.server, args.port_notify))
|
||||||
atexit.register(lambda: loop.run_until_complete(d_explorer.sub_close()))
|
atexit.register(lambda: loop.run_until_complete(d_explorer.sub_close()))
|
||||||
|
|
||||||
|
d_results = ResultsDock(win, area)
|
||||||
|
loop.run_until_complete(d_results.sub_connect(
|
||||||
|
args.server, args.port_notify))
|
||||||
|
atexit.register(lambda: loop.run_until_complete(d_results.sub_close()))
|
||||||
|
|
||||||
|
d_ttl_dds = MonInj()
|
||||||
|
loop.run_until_complete(d_ttl_dds.start(args.server, args.port_notify))
|
||||||
|
atexit.register(lambda: loop.run_until_complete(d_ttl_dds.stop()))
|
||||||
|
|
||||||
d_params = ParametersDock()
|
d_params = ParametersDock()
|
||||||
area.addDock(d_params, "right", d_explorer)
|
|
||||||
loop.run_until_complete(d_params.sub_connect(
|
loop.run_until_complete(d_params.sub_connect(
|
||||||
args.server, args.port_notify))
|
args.server, args.port_notify))
|
||||||
atexit.register(lambda: loop.run_until_complete(d_params.sub_close()))
|
atexit.register(lambda: loop.run_until_complete(d_params.sub_close()))
|
||||||
|
|
||||||
d_log = LogDock()
|
area.addDock(d_ttl_dds.dds_dock, "top")
|
||||||
area.addDock(d_log, "bottom")
|
area.addDock(d_ttl_dds.ttl_dock, "above", d_ttl_dds.dds_dock)
|
||||||
|
area.addDock(d_results, "above", d_ttl_dds.ttl_dock)
|
||||||
|
area.addDock(d_params, "above", d_results)
|
||||||
|
area.addDock(d_explorer, "above", d_params)
|
||||||
|
|
||||||
d_schedule = ScheduleDock(schedule_ctl)
|
d_schedule = ScheduleDock(schedule_ctl)
|
||||||
area.addDock(d_schedule, "above", d_log)
|
|
||||||
loop.run_until_complete(d_schedule.sub_connect(
|
loop.run_until_complete(d_schedule.sub_connect(
|
||||||
args.server, args.port_notify))
|
args.server, args.port_notify))
|
||||||
atexit.register(lambda: loop.run_until_complete(d_schedule.sub_close()))
|
atexit.register(lambda: loop.run_until_complete(d_schedule.sub_close()))
|
||||||
|
|
||||||
|
d_log = LogDock()
|
||||||
|
loop.run_until_complete(d_log.sub_connect(
|
||||||
|
args.server, args.port_notify))
|
||||||
|
atexit.register(lambda: loop.run_until_complete(d_log.sub_close()))
|
||||||
|
|
||||||
|
area.addDock(d_log, "bottom")
|
||||||
|
area.addDock(d_schedule, "above", d_log)
|
||||||
|
|
||||||
win.show()
|
win.show()
|
||||||
loop.run_forever()
|
loop.run_until_complete(win.exit_request.wait())
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
main()
|
main()
|
||||||
|
|
|
@ -6,10 +6,10 @@ import atexit
|
||||||
import os
|
import os
|
||||||
|
|
||||||
from artiq.protocols.pc_rpc import Server
|
from artiq.protocols.pc_rpc import Server
|
||||||
from artiq.protocols.sync_struct import Publisher
|
from artiq.protocols.sync_struct import Notifier, Publisher, process_mod
|
||||||
from artiq.protocols.file_db import FlatFileDB, SimpleHistory
|
from artiq.protocols.file_db import FlatFileDB
|
||||||
from artiq.master.scheduler import Scheduler
|
from artiq.master.scheduler import Scheduler
|
||||||
from artiq.master.results import RTResults, get_last_rid
|
from artiq.master.worker_db import get_last_rid
|
||||||
from artiq.master.repository import Repository
|
from artiq.master.repository import Repository
|
||||||
from artiq.tools import verbosity_args, init_logger
|
from artiq.tools import verbosity_args, init_logger
|
||||||
|
|
||||||
|
@ -30,17 +30,21 @@ def get_argparser():
|
||||||
return parser
|
return parser
|
||||||
|
|
||||||
|
|
||||||
|
class Log:
|
||||||
|
def __init__(self, depth):
|
||||||
|
self.depth = depth
|
||||||
|
self.data = Notifier([])
|
||||||
|
|
||||||
|
def log(self, rid, message):
|
||||||
|
if len(self.data.read) >= self.depth:
|
||||||
|
del self.data[0]
|
||||||
|
self.data.append((rid, message))
|
||||||
|
log.worker_pass_rid = True
|
||||||
|
|
||||||
|
|
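The Log class added above keeps a bounded history of (rid, message) tuples in a sync_struct Notifier so connected clients see updates. The sketch below models the same drop-oldest behaviour with a plain list; BoundedLog is an illustrative stand-in, not an ARTIQ class.

# Plain-list model of the bounded log above: keep at most `depth` entries,
# dropping the oldest first.
class BoundedLog:
    def __init__(self, depth):
        self.depth = depth
        self.data = []

    def log(self, rid, message):
        if len(self.data) >= self.depth:
            del self.data[0]
        self.data.append((rid, message))

l = BoundedLog(2)
for rid, message in [(0, "prepare"), (1, "run"), (2, "analyze")]:
    l.log(rid, message)
print(l.data)  # -> [(1, 'run'), (2, 'analyze')]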
||||||
def main():
|
def main():
|
||||||
args = get_argparser().parse_args()
|
args = get_argparser().parse_args()
|
||||||
|
|
||||||
init_logger(args)
|
init_logger(args)
|
||||||
ddb = FlatFileDB("ddb.pyon")
|
|
||||||
pdb = FlatFileDB("pdb.pyon")
|
|
||||||
simplephist = SimpleHistory(30)
|
|
||||||
pdb.hooks.append(simplephist)
|
|
||||||
rtr = RTResults()
|
|
||||||
repository = Repository()
|
|
||||||
|
|
||||||
if os.name == "nt":
|
if os.name == "nt":
|
||||||
loop = asyncio.ProactorEventLoop()
|
loop = asyncio.ProactorEventLoop()
|
||||||
asyncio.set_event_loop(loop)
|
asyncio.set_event_loop(loop)
|
||||||
|
@ -48,23 +52,31 @@ def main():
|
||||||
loop = asyncio.get_event_loop()
|
loop = asyncio.get_event_loop()
|
||||||
atexit.register(lambda: loop.close())
|
atexit.register(lambda: loop.close())
|
||||||
|
|
||||||
|
ddb = FlatFileDB("ddb.pyon")
|
||||||
|
pdb = FlatFileDB("pdb.pyon")
|
||||||
|
rtr = Notifier(dict())
|
||||||
|
log = Log(1000)
|
||||||
|
|
||||||
worker_handlers = {
|
worker_handlers = {
|
||||||
"req_device": ddb.request,
|
"get_device": ddb.get,
|
||||||
"req_parameter": pdb.request,
|
"get_parameter": pdb.get,
|
||||||
"set_parameter": pdb.set,
|
"set_parameter": pdb.set,
|
||||||
"init_rt_results": rtr.init,
|
"update_rt_results": lambda mod: process_mod(rtr, mod),
|
||||||
"update_rt_results": rtr.update,
|
"log": log.log
|
||||||
}
|
}
|
||||||
scheduler = Scheduler(get_last_rid() + 1, worker_handlers)
|
scheduler = Scheduler(get_last_rid() + 1, worker_handlers)
|
||||||
worker_handlers["scheduler_submit"] = scheduler.submit
|
worker_handlers["scheduler_submit"] = scheduler.submit
|
||||||
scheduler.start()
|
scheduler.start()
|
||||||
atexit.register(lambda: loop.run_until_complete(scheduler.stop()))
|
atexit.register(lambda: loop.run_until_complete(scheduler.stop()))
|
||||||
|
|
||||||
|
repository = Repository(log.log)
|
||||||
|
repository.scan_async()
|
||||||
|
|
||||||
server_control = Server({
|
server_control = Server({
|
||||||
"master_ddb": ddb,
|
"master_ddb": ddb,
|
||||||
"master_pdb": pdb,
|
"master_pdb": pdb,
|
||||||
"master_schedule": scheduler,
|
"master_schedule": scheduler,
|
||||||
"master_repository": repository,
|
"master_repository": repository
|
||||||
})
|
})
|
||||||
loop.run_until_complete(server_control.start(
|
loop.run_until_complete(server_control.start(
|
||||||
args.bind, args.port_control))
|
args.bind, args.port_control))
|
||||||
|
@ -74,9 +86,9 @@ def main():
|
||||||
"schedule": scheduler.notifier,
|
"schedule": scheduler.notifier,
|
||||||
"devices": ddb.data,
|
"devices": ddb.data,
|
||||||
"parameters": pdb.data,
|
"parameters": pdb.data,
|
||||||
"parameters_simplehist": simplephist.history,
|
"rt_results": rtr,
|
||||||
"rt_results": rtr.groups,
|
"explist": repository.explist,
|
||||||
"explist": repository.explist
|
"log": log.data
|
||||||
})
|
})
|
||||||
loop.run_until_complete(server_notify.start(
|
loop.run_until_complete(server_notify.start(
|
||||||
args.bind, args.port_notify))
|
args.bind, args.port_notify))
|
||||||
|
|
|
@@ -20,16 +20,13 @@ def get_argparser():
 
 
 def write_record(f, key, value):
+    key_size = len(key) + 1
+    value_size = len(value)
+    record_size = key_size + value_size + 4
+    f.write(struct.pack(">l", record_size))
     f.write(key.encode())
     f.write(b"\x00")
-    key_size = len(key) + 1
-    if key_size % 4:
-        f.write(bytes(4 - (key_size % 4)))
-    f.write(struct.pack(">l", len(value)))
     f.write(value)
-    value_size = len(value)
-    if value_size % 4:
-        f.write(bytes(4 - (value_size % 4)))
 
 
 def write_end_marker(f):
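The rewritten write_record above emits each record as a 4-byte big-endian total size, the key, a NUL terminator, then the raw value, with no padding. The sketch below round-trips one record through that layout; parse_record and the sample key/value are illustrative, not part of artiq_mkfs.

# Round-trip of the record layout written by the new write_record.
import io
import struct

def parse_record(f):
    record_size, = struct.unpack(">l", f.read(4))
    body = f.read(record_size - 4)
    key, _, value = body.partition(b"\x00")
    return key.decode(), value

buf = io.BytesIO()
key, value = "ip", b"192.168.1.50"          # made-up example entry
record_size = (len(key) + 1) + len(value) + 4
buf.write(struct.pack(">l", record_size))
buf.write(key.encode())
buf.write(b"\x00")
buf.write(value)

buf.seek(0)
print(parse_record(buf))  # -> ('ip', b'192.168.1.50')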
@@ -36,8 +36,11 @@ def list_targets(target_names, id_parameters):
 
 
 def list_methods(remote):
-    methods = remote.get_rpc_method_list()
-    for name, (argspec, docstring) in sorted(methods.items()):
+    doc = remote.get_rpc_method_list()
+    if doc["docstring"] is not None:
+        print(doc["docstring"])
+        print()
+    for name, (argspec, docstring) in sorted(doc["methods"].items()):
         args = ""
         for arg in argspec["args"]:
             args += arg
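As read off this hunk, get_rpc_method_list() now returns a dictionary carrying the target docstring plus a "methods" mapping of name to (argspec, docstring). The snippet below is a hedged illustration of that structure and of how list_methods walks it; the sample method name and text are made up.

# Illustrative structure of the new get_rpc_method_list() return value.
doc = {
    "docstring": "Example controller documentation.",   # may be None
    "methods": {
        "example_method": ({"args": ["self", "value"]}, "An example docstring."),
    },
}

if doc["docstring"] is not None:
    print(doc["docstring"])
    print()
for name, (argspec, docstring) in sorted(doc["methods"].items()):
    print(name, argspec["args"], "-", docstring)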
@ -9,20 +9,19 @@ import logging
|
||||||
|
|
||||||
import h5py
|
import h5py
|
||||||
|
|
||||||
from artiq.language.db import *
|
from artiq.language.environment import EnvExperiment
|
||||||
from artiq.language.experiment import Experiment
|
|
||||||
from artiq.protocols.file_db import FlatFileDB
|
from artiq.protocols.file_db import FlatFileDB
|
||||||
from artiq.master.worker_db import DBHub, ResultDB
|
from artiq.master.worker_db import DeviceManager, ResultDB
|
||||||
from artiq.tools import *
|
from artiq.tools import *
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class ELFRunner(Experiment, AutoDB):
|
class ELFRunner(EnvExperiment):
|
||||||
class DBKeys:
|
def build(self):
|
||||||
core = Device()
|
self.attr_device("core")
|
||||||
file = Argument()
|
self.attr_argument("file")
|
||||||
|
|
||||||
def run(self):
|
def run(self):
|
||||||
with open(self.file, "rb") as f:
|
with open(self.file, "rb") as f:
|
||||||
|
@ -36,42 +35,21 @@ class SimpleParamLogger:
|
||||||
logger.info("Parameter change: {} = {}".format(name, value))
|
logger.info("Parameter change: {} = {}".format(name, value))
|
||||||
|
|
||||||
|
|
||||||
class DummyWatchdog:
|
|
||||||
def __init__(self, t):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def __enter__(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def __exit__(self, type, value, traceback):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class DummyScheduler:
|
class DummyScheduler:
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.next_rid = 0
|
self.next_rid = 0
|
||||||
self.next_trid = 0
|
self.pipeline_name = "main"
|
||||||
|
self.priority = 0
|
||||||
|
self.expid = None
|
||||||
|
|
||||||
def run_queued(self, run_params):
|
def submit(self, pipeline_name, expid, priority, due_date, flush):
|
||||||
rid = self.next_rid
|
rid = self.next_rid
|
||||||
self.next_rid += 1
|
self.next_rid += 1
|
||||||
logger.info("Queuing: %s, RID=%s", run_params, rid)
|
logger.info("Submitting: %s, RID=%s", expid, rid)
|
||||||
return rid
|
return rid
|
||||||
|
|
||||||
def cancel_queued(self, rid):
|
def delete(self, rid):
|
||||||
logger.info("Cancelling RID %s", rid)
|
logger.info("Deleting RID %s", rid)
|
||||||
|
|
||||||
def run_timed(self, run_params, next_run):
|
|
||||||
trid = self.next_trid
|
|
||||||
self.next_trid += 1
|
|
||||||
next_run_s = time.strftime("%m/%d %H:%M:%S", time.localtime(next_run))
|
|
||||||
logger.info("Timing: %s at %s, TRID=%s", run_params, next_run_s, trid)
|
|
||||||
return trid
|
|
||||||
|
|
||||||
def cancel_timed(self, trid):
|
|
||||||
logger.info("Cancelling TRID %s", trid)
|
|
||||||
|
|
||||||
watchdog = DummyWatchdog
|
|
||||||
|
|
||||||
|
|
||||||
def get_argparser(with_file=True):
|
def get_argparser(with_file=True):
|
||||||
|
@ -98,7 +76,7 @@ def get_argparser(with_file=True):
|
||||||
return parser
|
return parser
|
||||||
|
|
||||||
|
|
||||||
def _build_experiment(dbh, args):
|
def _build_experiment(dmgr, pdb, rdb, args):
|
||||||
if hasattr(args, "file"):
|
if hasattr(args, "file"):
|
||||||
if args.file.endswith(".elf"):
|
if args.file.endswith(".elf"):
|
||||||
if args.arguments:
|
if args.arguments:
|
||||||
|
@ -106,7 +84,7 @@ def _build_experiment(dbh, args):
|
||||||
if args.experiment:
|
if args.experiment:
|
||||||
raise ValueError("experiment-by-name not supported "
|
raise ValueError("experiment-by-name not supported "
|
||||||
"for ELF kernels")
|
"for ELF kernels")
|
||||||
return ELFRunner(dbh, file=args.file)
|
return ELFRunner(dmgr, pdb, rdb, file=args.file)
|
||||||
else:
|
else:
|
||||||
module = file_import(args.file)
|
module = file_import(args.file)
|
||||||
file = args.file
|
file = args.file
|
||||||
|
@ -115,37 +93,38 @@ def _build_experiment(dbh, args):
|
||||||
file = getattr(module, "__file__")
|
file = getattr(module, "__file__")
|
||||||
exp = get_experiment(module, args.experiment)
|
exp = get_experiment(module, args.experiment)
|
||||||
arguments = parse_arguments(args.arguments)
|
arguments = parse_arguments(args.arguments)
|
||||||
return exp(dbh,
|
expid = {
|
||||||
scheduler=DummyScheduler(),
|
"file": file,
|
||||||
run_params=dict(file=file,
|
"experiment": args.experiment,
|
||||||
experiment=args.experiment,
|
"arguments": arguments
|
||||||
arguments=arguments),
|
}
|
||||||
**arguments)
|
dmgr.virtual_devices["scheduler"].expid = expid
|
||||||
|
return exp(dmgr, pdb, rdb, **arguments)
|
||||||
|
|
||||||
|
|
||||||
def run(with_file=False):
|
def run(with_file=False):
|
||||||
args = get_argparser(with_file).parse_args()
|
args = get_argparser(with_file).parse_args()
|
||||||
init_logger(args)
|
init_logger(args)
|
||||||
|
|
||||||
ddb = FlatFileDB(args.ddb)
|
dmgr = DeviceManager(FlatFileDB(args.ddb),
|
||||||
|
virtual_devices={"scheduler": DummyScheduler()})
|
||||||
pdb = FlatFileDB(args.pdb)
|
pdb = FlatFileDB(args.pdb)
|
||||||
pdb.hooks.append(SimpleParamLogger())
|
pdb.hooks.append(SimpleParamLogger())
|
||||||
rdb = ResultDB(lambda description: None, lambda mod: None)
|
rdb = ResultDB()
|
||||||
dbh = DBHub(ddb, pdb, rdb)
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
exp_inst = _build_experiment(dbh, args)
|
exp_inst = _build_experiment(dmgr, pdb, rdb, args)
|
||||||
rdb.build()
|
exp_inst.prepare()
|
||||||
exp_inst.run()
|
exp_inst.run()
|
||||||
exp_inst.analyze()
|
exp_inst.analyze()
|
||||||
finally:
|
finally:
|
||||||
dbh.close_devices()
|
dmgr.close_devices()
|
||||||
|
|
||||||
if args.hdf5 is not None:
|
if args.hdf5 is not None:
|
||||||
with h5py.File(args.hdf5, "w") as f:
|
with h5py.File(args.hdf5, "w") as f:
|
||||||
rdb.write_hdf5(f)
|
rdb.write_hdf5(f)
|
||||||
elif rdb.data.read or rdb.realtime_data.read:
|
elif rdb.rt.read or rdb.nrt:
|
||||||
r = chain(rdb.realtime_data.read.items(), rdb.data.read.items())
|
r = chain(rdb.rt.read.items(), rdb.nrt.items())
|
||||||
for k, v in sorted(r, key=itemgetter(0)):
|
for k, v in sorted(r, key=itemgetter(0)):
|
||||||
print("{}: {}".format(k, v))
|
print("{}: {}".format(k, v))
|
||||||
|
|
||||||
|
|
|
@ -15,8 +15,14 @@ def get_argparser():
|
||||||
choices=["LDA-102", "LDA-602"])
|
choices=["LDA-102", "LDA-602"])
|
||||||
simple_network_args(parser, 3253)
|
simple_network_args(parser, 3253)
|
||||||
parser.add_argument("-d", "--device", default=None,
|
parser.add_argument("-d", "--device", default=None,
|
||||||
help="USB serial number of the device."
|
help="USB serial number of the device. "
|
||||||
" Omit for simulation mode.")
|
"The serial number is written on a sticker under "
|
||||||
|
"the device, you should write for example "
|
||||||
|
"-d \"SN:03461\". You must prepend enough 0s for "
|
||||||
|
"it to be 5 digits. If omitted, the first "
|
||||||
|
"available device will be used.")
|
||||||
|
parser.add_argument("--simulation", action="store_true",
|
||||||
|
help="Put the driver in simulation mode.")
|
||||||
verbosity_args(parser)
|
verbosity_args(parser)
|
||||||
return parser
|
return parser
|
||||||
|
|
||||||
|
@ -24,7 +30,7 @@ def get_argparser():
|
||||||
def main():
|
def main():
|
||||||
args = get_argparser().parse_args()
|
args = get_argparser().parse_args()
|
||||||
init_logger(args)
|
init_logger(args)
|
||||||
if args.device is None:
|
if args.simulation:
|
||||||
lda = Ldasim()
|
lda = Ldasim()
|
||||||
else:
|
else:
|
||||||
lda = Lda(args.device, args.product)
|
lda = Lda(args.device, args.product)
|
||||||
|
|
|
@ -4,6 +4,7 @@
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
import logging
|
import logging
|
||||||
|
import sys
|
||||||
|
|
||||||
from artiq.devices.novatech409b.driver import Novatech409B
|
from artiq.devices.novatech409b.driver import Novatech409B
|
||||||
from artiq.protocols.pc_rpc import simple_server_loop
|
from artiq.protocols.pc_rpc import simple_server_loop
|
||||||
|
@ -19,7 +20,10 @@ def get_argparser():
|
||||||
simple_network_args(parser, 3254)
|
simple_network_args(parser, 3254)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"-d", "--device", default=None,
|
"-d", "--device", default=None,
|
||||||
help="serial port. Omit for simulation mode.")
|
help="serial port.")
|
||||||
|
parser.add_argument(
|
||||||
|
"--simulation", action="store_true",
|
||||||
|
help="Put the driver in simulation mode, even if --device is used.")
|
||||||
verbosity_args(parser)
|
verbosity_args(parser)
|
||||||
return parser
|
return parser
|
||||||
|
|
||||||
|
@@ -28,7 +32,12 @@ def main():
     args = get_argparser().parse_args()
     init_logger(args)
 
-    dev = Novatech409B(args.device)
+    if not args.simulation and args.device is None:
+        print("You need to specify either --simulation or -d/--device "
+              "argument. Use --help for more information.")
+        sys.exit(1)
+
+    dev = Novatech409B(args.device if not args.simulation else None)
     try:
         simple_server_loop(
             {"novatech409b": dev}, args.bind, args.port)
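The same --simulation/-d validation pattern recurs in the pdq2, pxi6733 and thorlabs_tcube controllers below. A standalone sketch of just that argparse pattern, outside any ARTIQ module; the example command line is made up.

# Shared controller argument-validation pattern (sketch only).
import argparse
import sys

parser = argparse.ArgumentParser()
parser.add_argument("-d", "--device", default=None, help="serial port.")
parser.add_argument("--simulation", action="store_true",
                    help="Put the driver in simulation mode, even if --device is used.")
args = parser.parse_args(["--simulation"])   # example invocation

if not args.simulation and args.device is None:
    print("You need to specify either --simulation or -d/--device "
          "argument. Use --help for more information.")
    sys.exit(1)
print("simulation mode" if args.simulation else "using device " + args.device)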
@ -1,6 +1,7 @@
|
||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
|
import sys
|
||||||
|
|
||||||
from artiq.devices.pdq2.driver import Pdq2
|
from artiq.devices.pdq2.driver import Pdq2
|
||||||
from artiq.protocols.pc_rpc import simple_server_loop
|
from artiq.protocols.pc_rpc import simple_server_loop
|
||||||
|
@ -12,7 +13,10 @@ def get_argparser():
|
||||||
simple_network_args(parser, 3252)
|
simple_network_args(parser, 3252)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"-d", "--device", default=None,
|
"-d", "--device", default=None,
|
||||||
help="serial port. Omit for simulation mode.")
|
help="serial port.")
|
||||||
|
parser.add_argument(
|
||||||
|
"--simulation", action="store_true",
|
||||||
|
help="Put the driver in simulation mode, even if --device is used.")
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--dump", default="pdq2_dump.bin",
|
"--dump", default="pdq2_dump.bin",
|
||||||
help="file to dump pdq2 data into, for later simulation")
|
help="file to dump pdq2 data into, for later simulation")
|
||||||
|
@ -24,7 +28,13 @@ def main():
|
||||||
args = get_argparser().parse_args()
|
args = get_argparser().parse_args()
|
||||||
init_logger(args)
|
init_logger(args)
|
||||||
port = None
|
port = None
|
||||||
if args.device is None:
|
|
||||||
|
if not args.simulation and args.device is None:
|
||||||
|
print("You need to specify either --simulation or -d/--device "
|
||||||
|
"argument. Use --help for more information.")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
if args.simulation:
|
||||||
port = open(args.dump, "wb")
|
port = open(args.dump, "wb")
|
||||||
dev = Pdq2(url=args.device, dev=port)
|
dev = Pdq2(url=args.device, dev=port)
|
||||||
try:
|
try:
|
||||||
|
|
|
@ -2,6 +2,7 @@
|
||||||
# Yann Sionneau <ys@m-labs.hk>, 2015
|
# Yann Sionneau <ys@m-labs.hk>, 2015
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
|
import sys
|
||||||
|
|
||||||
from artiq.protocols.pc_rpc import simple_server_loop
|
from artiq.protocols.pc_rpc import simple_server_loop
|
||||||
from artiq.devices.pxi6733.driver import DAQmx, DAQmxSim
|
from artiq.devices.pxi6733.driver import DAQmx, DAQmxSim
|
||||||
|
@ -11,13 +12,13 @@ from artiq.tools import verbosity_args, init_logger, simple_network_args
|
||||||
def get_argparser():
|
def get_argparser():
|
||||||
parser = argparse.ArgumentParser(description="NI PXI 6733 controller")
|
parser = argparse.ArgumentParser(description="NI PXI 6733 controller")
|
||||||
simple_network_args(parser, 3256)
|
simple_network_args(parser, 3256)
|
||||||
parser.add_argument("-d", "--device", default=None,
|
parser.add_argument("-C", "--channels", default=None,
|
||||||
help="Device name (e.g. Dev1)."
|
help="List of channels (e.g. Dev1/ao0, Dev1/ao1:3).")
|
||||||
" Omit for simulation mode.")
|
|
||||||
parser.add_argument("-c", "--clock", default="PFI5",
|
parser.add_argument("-c", "--clock", default="PFI5",
|
||||||
help="Input clock pin name (default: PFI5)")
|
help="Input clock pin name (default: PFI5)")
|
||||||
parser.add_argument("-a", "--analog-output", default="ao0",
|
parser.add_argument("--simulation", action='store_true',
|
||||||
help="Analog output pin name (default: ao0)")
|
help="Put the driver in simulation mode, even if "
|
||||||
|
"--channels is used.")
|
||||||
verbosity_args(parser)
|
verbosity_args(parser)
|
||||||
return parser
|
return parser
|
||||||
|
|
||||||
|
@ -26,12 +27,16 @@ def main():
|
||||||
args = get_argparser().parse_args()
|
args = get_argparser().parse_args()
|
||||||
init_logger(args)
|
init_logger(args)
|
||||||
|
|
||||||
if args.device is None:
|
if not args.simulation and args.channels is None:
|
||||||
|
print("You need to specify either --simulation or -C/--channels "
|
||||||
|
"argument. Use --help for more information.")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
if args.simulation:
|
||||||
daq = DAQmxSim()
|
daq = DAQmxSim()
|
||||||
else:
|
else:
|
||||||
daq = DAQmx(bytes(args.device, "ascii"),
|
daq = DAQmx(args.channels,
|
||||||
bytes(args.analog_output, "ascii"),
|
args.clock)
|
||||||
bytes(args.clock, "ascii"))
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
simple_server_loop({"pxi6733": daq},
|
simple_server_loop({"pxi6733": daq},
|
||||||
|
|
|
@ -1,6 +1,7 @@
|
||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
|
import sys
|
||||||
|
|
||||||
from artiq.devices.thorlabs_tcube.driver import Tdc, Tpz, TdcSim, TpzSim
|
from artiq.devices.thorlabs_tcube.driver import Tdc, Tpz, TdcSim, TpzSim
|
||||||
from artiq.protocols.pc_rpc import simple_server_loop
|
from artiq.protocols.pc_rpc import simple_server_loop
|
||||||
|
@ -13,7 +14,11 @@ def get_argparser():
|
||||||
help="type of the Thorlabs T-Cube device to control",
|
help="type of the Thorlabs T-Cube device to control",
|
||||||
choices=["TDC001", "TPZ001"])
|
choices=["TDC001", "TPZ001"])
|
||||||
parser.add_argument("-d", "--device", default=None,
|
parser.add_argument("-d", "--device", default=None,
|
||||||
help="serial port. Omit for simulation mode.")
|
help="serial device. See documentation for how to "
|
||||||
|
"specify a USB Serial Number.")
|
||||||
|
parser.add_argument("--simulation", action="store_true",
|
||||||
|
help="Put the driver in simulation mode, even if "
|
||||||
|
"--device is used.")
|
||||||
simple_network_args(parser, 3255)
|
simple_network_args(parser, 3255)
|
||||||
verbosity_args(parser)
|
verbosity_args(parser)
|
||||||
return parser
|
return parser
|
||||||
|
@ -23,7 +28,12 @@ def main():
|
||||||
args = get_argparser().parse_args()
|
args = get_argparser().parse_args()
|
||||||
init_logger(args)
|
init_logger(args)
|
||||||
|
|
||||||
if args.device is None:
|
if not args.simulation and args.device is None:
|
||||||
|
print("You need to specify either --simulation or -d/--device "
|
||||||
|
"argument. Use --help for more information.")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
if args.simulation:
|
||||||
if args.product == "TDC001":
|
if args.product == "TDC001":
|
||||||
dev = TdcSim()
|
dev = TdcSim()
|
||||||
elif args.product == "TPZ001":
|
elif args.product == "TPZ001":
|
||||||
|
|
|
@@ -6,15 +6,14 @@ from migen.bus.transactions import *
 from migen.sim.generic import run_simulation
 
 
-class AD9858(Module):
-    """Wishbone interface to the AD9858 DDS chip.
+class AD9xxx(Module):
+    """Wishbone interface to the AD9858 and AD9914 DDS chips.
 
-    Addresses 0-63 map the AD9858 registers.
-    Data is zero-padded.
+    Addresses 0-2**flen(pads.a)-1 map the AD9xxx registers.
 
-    Write to address 64 to pulse the FUD signal.
-    Address 65 is a GPIO register that controls the sel, p and reset signals.
-    sel is mapped to the lower bits, followed by p and reset.
+    Write to address 2**flen(pads.a) to pulse the FUD signal.
+    Address 2**flen(pads.a)+1 is a GPIO register that controls the
+    sel and reset signals. rst is mapped to bit 0, followed by sel.
 
     Write timing:
     Address is set one cycle before assertion of we_n.
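Per the updated docstring, the special addresses now scale with the width of the address bus rather than being hard-coded to 64 and 65. A small, hedged helper making that concrete; the 6-bit example corresponds to the AD9858 test pads later in this diff.

# Restating the address map from the docstring above: registers occupy
# 0 .. 2**a_width - 1, the next address pulses FUD, and the one after is the
# GPIO register (bit 0 = reset, higher bits = sel). Illustrative helper only.
def ad9xxx_special_addresses(a_width):
    fud_address = 2**a_width
    gpio_address = 2**a_width + 1
    return fud_address, gpio_address

print(ad9xxx_special_addresses(6))  # 6-bit address bus (AD9858) -> (64, 65)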
@ -28,6 +27,7 @@ class AD9858(Module):
|
||||||
Design:
|
Design:
|
||||||
All IO pads are registered.
|
All IO pads are registered.
|
||||||
|
|
||||||
|
With QC1 adapter:
|
||||||
LVDS driver/receiver propagation delays are 3.6+4.5 ns max
|
LVDS driver/receiver propagation delays are 3.6+4.5 ns max
|
||||||
LVDS state transition delays are 20, 15 ns max
|
LVDS state transition delays are 20, 15 ns max
|
||||||
Schmitt trigger delays are 6.4ns max
|
Schmitt trigger delays are 6.4ns max
|
||||||
|
@ -38,15 +38,15 @@ class AD9858(Module):
|
||||||
read_wait_cycles=10, hiz_wait_cycles=3,
|
read_wait_cycles=10, hiz_wait_cycles=3,
|
||||||
bus=None):
|
bus=None):
|
||||||
if bus is None:
|
if bus is None:
|
||||||
bus = wishbone.Interface()
|
bus = wishbone.Interface(data_width=flen(pads.d))
|
||||||
self.bus = bus
|
self.bus = bus
|
||||||
|
|
||||||
# # #
|
# # #
|
||||||
|
|
||||||
dts = TSTriple(8)
|
dts = TSTriple(flen(pads.d))
|
||||||
self.specials += dts.get_tristate(pads.d)
|
self.specials += dts.get_tristate(pads.d)
|
||||||
hold_address = Signal()
|
hold_address = Signal()
|
||||||
dr = Signal(8)
|
dr = Signal(flen(pads.d))
|
||||||
rx = Signal()
|
rx = Signal()
|
||||||
self.sync += [
|
self.sync += [
|
||||||
If(~hold_address, pads.a.eq(bus.adr)),
|
If(~hold_address, pads.a.eq(bus.adr)),
|
||||||
|
@ -55,13 +55,14 @@ class AD9858(Module):
|
||||||
dts.oe.eq(~rx)
|
dts.oe.eq(~rx)
|
||||||
]
|
]
|
||||||
|
|
||||||
gpio = Signal(flen(pads.sel) + flen(pads.p) + 1)
|
gpio = Signal(flen(pads.sel) + 1)
|
||||||
gpio_load = Signal()
|
gpio_load = Signal()
|
||||||
self.sync += If(gpio_load, gpio.eq(bus.dat_w))
|
self.sync += If(gpio_load, gpio.eq(bus.dat_w))
|
||||||
self.comb += [
|
if hasattr(pads, "rst"):
|
||||||
Cat(pads.sel, pads.p).eq(gpio),
|
self.comb += pads.rst.eq(gpio[0])
|
||||||
pads.rst_n.eq(~gpio[-1]),
|
else:
|
||||||
]
|
self.comb += pads.rst_n.eq(~gpio[0])
|
||||||
|
self.comb += pads.sel.eq(gpio[1:])
|
||||||
|
|
||||||
bus_r_gpio = Signal()
|
bus_r_gpio = Signal()
|
||||||
self.comb += If(bus_r_gpio,
|
self.comb += If(bus_r_gpio,
|
||||||
|
@ -71,7 +72,10 @@ class AD9858(Module):
|
||||||
)
|
)
|
||||||
|
|
||||||
fud = Signal()
|
fud = Signal()
|
||||||
self.sync += pads.fud_n.eq(~fud)
|
if hasattr(pads, "fud"):
|
||||||
|
self.sync += pads.fud.eq(fud)
|
||||||
|
else:
|
||||||
|
self.sync += pads.fud_n.eq(~fud)
|
||||||
|
|
||||||
pads.wr_n.reset = 1
|
pads.wr_n.reset = 1
|
||||||
pads.rd_n.reset = 1
|
pads.rd_n.reset = 1
|
||||||
|
@ -87,7 +91,7 @@ class AD9858(Module):
|
||||||
|
|
||||||
fsm.act("IDLE",
|
fsm.act("IDLE",
|
||||||
If(bus.cyc & bus.stb,
|
If(bus.cyc & bus.stb,
|
||||||
If(bus.adr[6],
|
If(bus.adr[flen(pads.a)],
|
||||||
If(bus.adr[0],
|
If(bus.adr[0],
|
||||||
NextState("GPIO")
|
NextState("GPIO")
|
||||||
).Else(
|
).Else(
|
||||||
|
@ -168,7 +172,6 @@ class _TestPads:
|
||||||
self.a = Signal(6)
|
self.a = Signal(6)
|
||||||
self.d = Signal(8)
|
self.d = Signal(8)
|
||||||
self.sel = Signal(5)
|
self.sel = Signal(5)
|
||||||
self.p = Signal(2)
|
|
||||||
self.fud_n = Signal()
|
self.fud_n = Signal()
|
||||||
self.wr_n = Signal()
|
self.wr_n = Signal()
|
||||||
self.rd_n = Signal()
|
self.rd_n = Signal()
|
||||||
|
@ -178,11 +181,11 @@ class _TestPads:
|
||||||
class _TB(Module):
|
class _TB(Module):
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
pads = _TestPads()
|
pads = _TestPads()
|
||||||
self.submodules.dut = AD9858(pads, drive_fud=True)
|
self.submodules.dut = AD9xxx(pads, drive_fud=True)
|
||||||
self.submodules.initiator = wishbone.Initiator(_test_gen())
|
self.submodules.initiator = wishbone.Initiator(_test_gen())
|
||||||
self.submodules.interconnect = wishbone.InterconnectPointToPoint(
|
self.submodules.interconnect = wishbone.InterconnectPointToPoint(
|
||||||
self.initiator.bus, self.dut.bus)
|
self.initiator.bus, self.dut.bus)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
run_simulation(_TB(), vcd_name="ad9858.vcd")
|
run_simulation(_TB(), vcd_name="ad9xxx.vcd")
|
|
@ -3,12 +3,11 @@ from migen.bank.description import *
|
||||||
from migen.bus import wishbone
|
from migen.bus import wishbone
|
||||||
|
|
||||||
from misoclib.cpu import mor1kx
|
from misoclib.cpu import mor1kx
|
||||||
from misoclib.mem.sdram.frontend.wishbone2lasmi import WB2LASMI
|
|
||||||
from misoclib.soc import mem_decoder
|
from misoclib.soc import mem_decoder
|
||||||
|
|
||||||
|
|
||||||
class KernelCPU(Module):
|
class KernelCPU(Module):
|
||||||
def __init__(self, platform, lasmim,
|
def __init__(self, platform,
|
||||||
exec_address=0x40400000,
|
exec_address=0x40400000,
|
||||||
main_mem_origin=0x40000000,
|
main_mem_origin=0x40000000,
|
||||||
l2_size=8192):
|
l2_size=8192):
|
||||||
|
@ -29,16 +28,8 @@ class KernelCPU(Module):
|
||||||
"sys_kernel")
|
"sys_kernel")
|
||||||
|
|
||||||
# DRAM access
|
# DRAM access
|
||||||
# XXX Vivado 2014.X workaround
|
self.wb_sdram = wishbone.Interface()
|
||||||
from mibuild.xilinx.vivado import XilinxVivadoToolchain
|
self.add_wb_slave(mem_decoder(main_mem_origin), self.wb_sdram)
|
||||||
if isinstance(platform.toolchain, XilinxVivadoToolchain):
|
|
||||||
from migen.fhdl.simplify import FullMemoryWE
|
|
||||||
self.submodules.wishbone2lasmi = FullMemoryWE()(
|
|
||||||
WB2LASMI(l2_size//4, lasmim))
|
|
||||||
else:
|
|
||||||
self.submodules.wishbone2lasmi = WB2LASMI(l2_size//4, lasmim)
|
|
||||||
self.add_wb_slave(mem_decoder(main_mem_origin),
|
|
||||||
self.wishbone2lasmi.wishbone)
|
|
||||||
|
|
||||||
def get_csrs(self):
|
def get_csrs(self):
|
||||||
return [self._reset]
|
return [self._reset]
|
||||||
|
|
|
@ -4,10 +4,22 @@ from mibuild.generic_platform import *
|
||||||
papilio_adapter_io = [
|
papilio_adapter_io = [
|
||||||
("ext_led", 0, Pins("B:7"), IOStandard("LVTTL")),
|
("ext_led", 0, Pins("B:7"), IOStandard("LVTTL")),
|
||||||
|
|
||||||
|
# to feed the 125 MHz clock (preferrably from DDS SYNC_CLK)
|
||||||
|
# to the FPGA, use the xtrig pair.
|
||||||
|
#
|
||||||
|
# on papiliopro-adapter, xtrig (C:12) is connected to a GCLK
|
||||||
|
#
|
||||||
|
# on pipistrello, C:15 is the only GCLK in proximity, used as a button
|
||||||
|
# input, BTN2/PMT2 in papiliopro-adapter
|
||||||
|
# either improve the DDS box to feed 125MHz into the PMT2 pair, or:
|
||||||
|
#
|
||||||
|
# * disconnect C:15 from its periphery on the adapter board
|
||||||
|
# * bridge C:15 to the xtrig output of the transciever
|
||||||
|
# * optionally, disconnect C:12 from its periphery
|
||||||
|
("xtrig", 0, Pins("C:12"), IOStandard("LVTTL")),
|
||||||
("pmt", 0, Pins("C:13"), IOStandard("LVTTL")),
|
("pmt", 0, Pins("C:13"), IOStandard("LVTTL")),
|
||||||
("pmt", 1, Pins("C:14"), IOStandard("LVTTL")),
|
("pmt", 1, Pins("C:14"), IOStandard("LVTTL")),
|
||||||
("xtrig", 0, Pins("C:12"), IOStandard("LVTTL")),
|
("pmt", 2, Pins("C:15"), IOStandard("LVTTL")), # rarely equipped
|
||||||
("dds_clock", 0, Pins("C:15"), IOStandard("LVTTL")), # PMT2
|
|
||||||
|
|
||||||
("ttl", 0, Pins("C:11"), IOStandard("LVTTL")),
|
("ttl", 0, Pins("C:11"), IOStandard("LVTTL")),
|
||||||
("ttl", 1, Pins("C:10"), IOStandard("LVTTL")),
|
("ttl", 1, Pins("C:10"), IOStandard("LVTTL")),
|
||||||
|
|
|
@ -0,0 +1,78 @@
|
||||||
|
from mibuild.generic_platform import *
|
||||||
|
|
||||||
|
|
||||||
|
fmc_adapter_io = [
|
||||||
|
("ttl", 0, Pins("LPC:LA00_CC_P"), IOStandard("LVTTL")),
|
||||||
|
("ttl", 1, Pins("LPC:LA02_P"), IOStandard("LVTTL")),
|
||||||
|
("ttl", 2, Pins("LPC:LA00_CC_N"), IOStandard("LVTTL")),
|
||||||
|
("ttl", 3, Pins("LPC:LA02_N"), IOStandard("LVTTL")),
|
||||||
|
("ttl", 4, Pins("LPC:LA01_CC_P"), IOStandard("LVTTL")),
|
||||||
|
("ttl", 5, Pins("LPC:LA01_CC_N"), IOStandard("LVTTL")),
|
||||||
|
("ttl", 6, Pins("LPC:LA06_P"), IOStandard("LVTTL")),
|
||||||
|
("ttl", 7, Pins("LPC:LA06_N"), IOStandard("LVTTL")),
|
||||||
|
("ttl", 8, Pins("LPC:LA05_P"), IOStandard("LVTTL")),
|
||||||
|
("ttl", 9, Pins("LPC:LA05_N"), IOStandard("LVTTL")),
|
||||||
|
("ttl", 10, Pins("LPC:LA10_P"), IOStandard("LVTTL")),
|
||||||
|
("ttl", 11, Pins("LPC:LA09_P"), IOStandard("LVTTL")),
|
||||||
|
("ttl", 12, Pins("LPC:LA10_N"), IOStandard("LVTTL")),
|
||||||
|
("ttl", 13, Pins("LPC:LA09_N"), IOStandard("LVTTL")),
|
||||||
|
("ttl", 14, Pins("LPC:LA13_P"), IOStandard("LVTTL")),
|
||||||
|
("ttl", 15, Pins("LPC:LA14_P"), IOStandard("LVTTL")),
|
||||||
|
|
||||||
|
("dds", 0,
|
||||||
|
Subsignal("a", Pins("LPC:LA22_N LPC:LA21_P LPC:LA22_P LPC:LA19_N "
|
||||||
|
"LPC:LA20_N LPC:LA19_P LPC:LA20_P")),
|
||||||
|
Subsignal("d", Pins("LPC:LA15_N LPC:LA16_N LPC:LA15_P LPC:LA16_P "
|
||||||
|
"LPC:LA11_N LPC:LA12_N LPC:LA11_P LPC:LA12_P "
|
||||||
|
"LPC:LA07_N LPC:LA08_N LPC:LA07_P LPC:LA08_P "
|
||||||
|
"LPC:LA04_N LPC:LA03_N LPC:LA04_P LPC:LA03_P")),
|
||||||
|
Subsignal("sel", Pins("LPC:LA24_N LPC:LA29_P LPC:LA28_P LPC:LA29_N "
|
||||||
|
"LPC:LA28_N LPC:LA31_P LPC:LA30_P LPC:LA31_N "
|
||||||
|
"LPC:LA30_N LPC:LA33_P LPC:LA33_N")),
|
||||||
|
Subsignal("fud", Pins("LPC:LA21_N")),
|
||||||
|
Subsignal("wr_n", Pins("LPC:LA24_P")),
|
||||||
|
Subsignal("rd_n", Pins("LPC:LA25_N")),
|
||||||
|
Subsignal("rst", Pins("LPC:LA25_P")),
|
||||||
|
IOStandard("LVTTL")),
|
||||||
|
|
||||||
|
("i2c", 0,
|
||||||
|
Subsignal("scl", Pins("LPC:IIC_SLC")),
|
||||||
|
Subsignal("sda", Pins("LPC:IIC_SDA")),
|
||||||
|
IOStandard("LVCMOS25")),
|
||||||
|
|
||||||
|
("clk_m2c", 0,
|
||||||
|
Subsignal("p", Pins("LPC:CLK0_M2C_P")),
|
||||||
|
Subsignal("n", Pins("LPC:CLK0_M2C_N")),
|
||||||
|
IOStandard("LVDS")),
|
||||||
|
|
||||||
|
("clk_m2c", 1,
|
||||||
|
Subsignal("p", Pins("LPC:CLK1_M2C_P")),
|
||||||
|
Subsignal("n", Pins("LPC:CLK1_M2C_N")),
|
||||||
|
IOStandard("LVDS")),
|
||||||
|
|
||||||
|
("la32", 0,
|
||||||
|
Subsignal("p", Pins("LPC:LA32_P")),
|
||||||
|
Subsignal("n", Pins("LPC:LA32_N")),
|
||||||
|
IOStandard("LVDS")),
|
||||||
|
|
||||||
|
("spi", 0,
|
||||||
|
Subsignal("clk", Pins("LPC:LA13_N")),
|
||||||
|
Subsignal("ce", Pins("LPC:LA14_N")),
|
||||||
|
Subsignal("mosi", Pins("LPC:LA17_CC_P")),
|
||||||
|
Subsignal("miso", Pins("LPC:LA17_CC_N")),
|
||||||
|
IOStandard("LVTTL")),
|
||||||
|
|
||||||
|
("spi", 1,
|
||||||
|
Subsignal("clk", Pins("LPC:LA18_CC_P")),
|
||||||
|
Subsignal("ce", Pins("LPC:LA18_CC_N")),
|
||||||
|
Subsignal("mosi", Pins("LPC:LA23_P")),
|
||||||
|
Subsignal("miso", Pins("LPC:LA23_N")),
|
||||||
|
IOStandard("LVTTL")),
|
||||||
|
|
||||||
|
("spi", 2,
|
||||||
|
Subsignal("clk", Pins("LPC:LA27_P")),
|
||||||
|
Subsignal("ce", Pins("LPC:LA26_P")),
|
||||||
|
Subsignal("mosi", Pins("LPC:LA27_N")),
|
||||||
|
Subsignal("miso", Pins("LPC:LA26_N")),
|
||||||
|
IOStandard("LVTTL")),
|
||||||
|
]
|
|
@@ -1 +1,2 @@
 from artiq.gateware.rtio.core import Channel, RTIO
+from artiq.gateware.rtio.moninj import MonInj
@@ -44,7 +44,8 @@ class _RTIOCounter(Module):
 
         # # #
 
-        self.sync.rio += self.value_rio.eq(self.value_rio + 1),
+        # note: counter is in rtio domain and never affected by the reset CSRs
+        self.sync.rtio += self.value_rio.eq(self.value_rio + 1)
         gt = _GrayCodeTransfer(width)
         self.submodules += gt
         self.comb += gt.i.eq(self.value_rio), self.value_sys.eq(gt.o)
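The counter value crosses from the rtio to the sys clock domain through _GrayCodeTransfer, so that at most one bit changes per increment while the value is being sampled. The module itself is not shown in this hunk; the functions below are only a plain-Python reference for the standard binary/Gray encoding it relies on.

# Reference binary <-> Gray conversion (standard formulas, illustration only).
def to_gray(value):
    return value ^ (value >> 1)

def from_gray(value):
    mask = value >> 1
    while mask:
        value ^= mask
        mask >>= 1
    return value

assert all(from_gray(to_gray(i)) == i for i in range(256))
# Adjacent counter values differ in exactly one bit after encoding:
assert all(bin(to_gray(i) ^ to_gray(i + 1)).count("1") == 1 for i in range(255))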
@@ -121,12 +122,20 @@ class _OutputManager(Module):
             sequence_error.eq(self.ev.timestamp < buf.timestamp[fine_ts_width:])
         ]
         if interface.suppress_nop:
-            self.sync.rsys += nop.eq(
-                optree("&",
-                       [getattr(self.ev, a) == getattr(buf, a)
-                        for a in ("data", "address")
-                        if hasattr(self.ev, a)],
-                       default=0))
+            # disable NOP at reset: do not suppress a first write with all 0s
+            nop_en = Signal(reset=0)
+            self.sync.rsys += [
+                nop.eq(nop_en &
+                       optree("&",
+                              [getattr(self.ev, a) == getattr(buf, a)
+                               for a in ("data", "address")
+                               if hasattr(self.ev, a)],
+                              default=0)),
+                # buf now contains valid data. enable NOP.
+                If(self.we & ~sequence_error, nop_en.eq(1)),
+                # underflows cancel the write. allow it to be retried.
+                If(self.underflow, nop_en.eq(0))
+            ]
         self.comb += self.sequence_error.eq(self.we & sequence_error)
 
         # Buffer read and FIFO write
@@ -247,11 +256,20 @@ class _InputManager(Module):
 
 
 class Channel:
-    def __init__(self, interface, ofifo_depth=64, ififo_depth=64):
+    def __init__(self, interface, probes=[], overrides=[],
+                 ofifo_depth=64, ififo_depth=64):
         self.interface = interface
+        self.probes = probes
+        self.overrides = overrides
         self.ofifo_depth = ofifo_depth
         self.ififo_depth = ififo_depth
 
+    @classmethod
+    def from_phy(cls, phy, **kwargs):
+        probes = getattr(phy, "probes", [])
+        overrides = getattr(phy, "overrides", [])
+        return cls(phy.rtlink, probes, overrides, **kwargs)
+
 
 class _KernelCSRs(AutoCSR):
     def __init__(self, chan_sel_width,
@ -0,0 +1,68 @@
|
||||||
|
from migen.fhdl.std import *
|
||||||
|
from migen.bank.description import *
|
||||||
|
from migen.genlib.cdc import BusSynchronizer, MultiReg
|
||||||
|
|
||||||
|
|
||||||
|
class Monitor(Module, AutoCSR):
|
||||||
|
def __init__(self, channels):
|
||||||
|
chan_probes = [c.probes for c in channels]
|
||||||
|
|
||||||
|
max_chan_probes = max(len(cp) for cp in chan_probes)
|
||||||
|
max_probe_len = max(flen(p) for cp in chan_probes for p in cp)
|
||||||
|
self.chan_sel = CSRStorage(bits_for(len(chan_probes)-1))
|
||||||
|
self.probe_sel = CSRStorage(bits_for(max_chan_probes-1))
|
||||||
|
self.value_update = CSR()
|
||||||
|
self.value = CSRStatus(max_probe_len)
|
||||||
|
|
||||||
|
# # #
|
||||||
|
|
||||||
|
chan_probes_sys = []
|
||||||
|
for cp in chan_probes:
|
||||||
|
cp_sys = []
|
||||||
|
for p in cp:
|
||||||
|
vs = BusSynchronizer(flen(p), "rio", "rsys")
|
||||||
|
self.submodules += vs
|
||||||
|
self.comb += vs.i.eq(p)
|
||||||
|
cp_sys.append(vs.o)
|
||||||
|
cp_sys += [0]*(max_chan_probes-len(cp))
|
||||||
|
chan_probes_sys.append(Array(cp_sys)[self.probe_sel.storage])
|
||||||
|
self.sync += If(self.value_update.re,
|
||||||
|
self.value.status.eq(
|
||||||
|
Array(chan_probes_sys)[self.chan_sel.storage]))
|
||||||
|
|
||||||
|
|
||||||
|
class Injector(Module, AutoCSR):
|
||||||
|
def __init__(self, channels):
|
||||||
|
chan_overrides = [c.overrides for c in channels]
|
||||||
|
|
||||||
|
max_chan_overrides = max(len(co) for co in chan_overrides)
|
||||||
|
max_override_len = max(flen(o) for co in chan_overrides for o in co)
|
||||||
|
self.chan_sel = CSRStorage(bits_for(len(chan_overrides)-1))
|
||||||
|
self.override_sel = CSRStorage(bits_for(max_chan_overrides-1))
|
||||||
|
self.value = CSR(max_override_len)
|
||||||
|
|
||||||
|
# # #
|
||||||
|
|
||||||
|
chan_overrides_sys = []
|
||||||
|
for n_channel, co in enumerate(chan_overrides):
|
||||||
|
co_sys = []
|
||||||
|
for n_override, o in enumerate(co):
|
||||||
|
# We do the clock domain transfer with a simple double-latch.
|
||||||
|
# Software has to ensure proper timing of any strobe signal etc.
|
||||||
|
# to avoid problematic glitches.
|
||||||
|
o_sys = Signal.like(o)
|
||||||
|
self.specials += MultiReg(o_sys, o, "rio")
|
||||||
|
self.sync += If(self.value.re & (self.chan_sel.storage == n_channel)
|
||||||
|
& (self.override_sel.storage == n_override),
|
||||||
|
o_sys.eq(self.value.r))
|
||||||
|
co_sys.append(o_sys)
|
||||||
|
co_sys += [0]*(max_chan_overrides-len(co))
|
||||||
|
chan_overrides_sys.append(Array(co_sys)[self.override_sel.storage])
|
||||||
|
self.comb += self.value.w.eq(
|
||||||
|
Array(chan_overrides_sys)[self.chan_sel.storage])
|
||||||
|
|
||||||
|
|
||||||
|
class MonInj(Module, AutoCSR):
|
||||||
|
def __init__(self, channels):
|
||||||
|
self.submodules.mon = Monitor(channels)
|
||||||
|
self.submodules.inj = Injector(channels)
|
|
@ -0,0 +1,60 @@
|
||||||
|
from migen.fhdl.std import *
|
||||||
|
|
||||||
|
from artiq.gateware import ad9xxx
|
||||||
|
from artiq.gateware.rtio.phy.wishbone import RT2WB
|
||||||
|
|
||||||
|
|
||||||
|
class _AD9xxx(Module):
|
||||||
|
def __init__(self, ftw_base, pads, nchannels, **kwargs):
|
||||||
|
self.submodules._ll = RenameClockDomains(
|
||||||
|
ad9xxx.AD9xxx(pads, **kwargs), "rio")
|
||||||
|
self.submodules._rt2wb = RT2WB(flen(pads.a)+1, self._ll.bus)
|
||||||
|
self.rtlink = self._rt2wb.rtlink
|
||||||
|
self.probes = [Signal(32) for i in range(nchannels)]
|
||||||
|
|
||||||
|
# # #
|
||||||
|
|
||||||
|
# buffer the current address/data on the rtlink output
|
||||||
|
current_address = Signal.like(self.rtlink.o.address)
|
||||||
|
current_data = Signal.like(self.rtlink.o.data)
|
||||||
|
self.sync.rio += If(self.rtlink.o.stb,
|
||||||
|
current_address.eq(self.rtlink.o.address),
|
||||||
|
current_data.eq(self.rtlink.o.data))
|
||||||
|
|
||||||
|
# keep track of the currently selected channel
|
||||||
|
current_channel = Signal(max=nchannels)
|
||||||
|
self.sync.rio += If(current_address == 2**flen(pads.a) + 1,
|
||||||
|
current_channel.eq(current_data))
|
||||||
|
|
||||||
|
# keep track of frequency tuning words, before they are FUDed
|
||||||
|
ftws = [Signal(32) for i in range(nchannels)]
|
||||||
|
for c, ftw in enumerate(ftws):
|
||||||
|
if flen(pads.d) == 8:
|
||||||
|
self.sync.rio += \
|
||||||
|
If(current_channel == c, [
|
||||||
|
If(current_address == ftw_base+i,
|
||||||
|
ftw[i*8:(i+1)*8].eq(current_data))
|
||||||
|
for i in range(4)])
|
||||||
|
elif flen(pads.d) == 16:
|
||||||
|
self.sync.rio += \
|
||||||
|
If(current_channel == c, [
|
||||||
|
If(current_address == ftw_base+2*i,
|
||||||
|
ftw[i*16:(i+1)*16].eq(current_data))
|
||||||
|
for i in range(2)])
|
||||||
|
else:
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
# FTW to probe on FUD
|
||||||
|
self.sync.rio += If(current_address == 2**flen(pads.a), [
|
||||||
|
If(current_channel == c, probe.eq(ftw))
|
||||||
|
for c, (probe, ftw) in enumerate(zip(self.probes, ftws))])
|
||||||
|
|
||||||
|
|
||||||
|
class AD9858(_AD9xxx):
|
||||||
|
def __init__(self, pads, nchannels, **kwargs):
|
||||||
|
_AD9xxx.__init__(self, 0x0a, pads, nchannels, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
class AD9914(_AD9xxx):
|
||||||
|
def __init__(self, pads, nchannels, **kwargs):
|
||||||
|
_AD9xxx.__init__(self, 0x2d, pads, nchannels, **kwargs)
|
|
@ -7,10 +7,24 @@ from artiq.gateware.rtio import rtlink
|
||||||
class Output(Module):
|
class Output(Module):
|
||||||
def __init__(self, pad):
|
def __init__(self, pad):
|
||||||
self.rtlink = rtlink.Interface(rtlink.OInterface(1))
|
self.rtlink = rtlink.Interface(rtlink.OInterface(1))
|
||||||
|
self.probes = [pad]
|
||||||
|
override_en = Signal()
|
||||||
|
override_o = Signal()
|
||||||
|
self.overrides = [override_en, override_o]
|
||||||
|
|
||||||
# # #
|
# # #
|
||||||
|
|
||||||
self.sync.rio_phy += If(self.rtlink.o.stb, pad.eq(self.rtlink.o.data))
|
pad_k = Signal()
|
||||||
|
self.sync.rio_phy += [
|
||||||
|
If(self.rtlink.o.stb,
|
||||||
|
pad_k.eq(self.rtlink.o.data)
|
||||||
|
),
|
||||||
|
If(override_en,
|
||||||
|
pad.eq(override_o)
|
||||||
|
).Else(
|
||||||
|
pad.eq(pad_k)
|
||||||
|
)
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
class Inout(Module):
|
class Inout(Module):
|
||||||
|
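With the probes and overrides added above, the pad now follows the kernel-driven level only while override_en is clear. A one-line software restatement of that mux, for illustration only:

# Software restatement of the Output override mux above.
def ttl_output_pad(pad_k, override_en, override_o):
    return override_o if override_en else pad_k

assert ttl_output_pad(pad_k=1, override_en=0, override_o=0) == 1
assert ttl_output_pad(pad_k=1, override_en=1, override_o=0) == 0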
@ -18,6 +32,11 @@ class Inout(Module):
|
||||||
self.rtlink = rtlink.Interface(
|
self.rtlink = rtlink.Interface(
|
||||||
rtlink.OInterface(2, 2),
|
rtlink.OInterface(2, 2),
|
||||||
rtlink.IInterface(1))
|
rtlink.IInterface(1))
|
||||||
|
override_en = Signal()
|
||||||
|
override_o = Signal()
|
||||||
|
override_oe = Signal()
|
||||||
|
self.overrides = [override_en, override_o, override_oe]
|
||||||
|
self.probes = []
|
||||||
|
|
||||||
# # #
|
# # #
|
||||||
|
|
||||||
|
@ -25,10 +44,21 @@ class Inout(Module):
|
||||||
self.specials += ts.get_tristate(pad)
|
self.specials += ts.get_tristate(pad)
|
||||||
sensitivity = Signal(2)
|
sensitivity = Signal(2)
|
||||||
|
|
||||||
self.sync.rio_phy += If(self.rtlink.o.stb,
|
o_k = Signal()
|
||||||
If(self.rtlink.o.address == 0, ts.o.eq(self.rtlink.o.data[0])),
|
oe_k = Signal()
|
||||||
If(self.rtlink.o.address == 1, ts.oe.eq(self.rtlink.o.data[0])),
|
self.sync.rio_phy += [
|
||||||
|
If(self.rtlink.o.stb,
|
||||||
|
If(self.rtlink.o.address == 0, o_k.eq(self.rtlink.o.data[0])),
|
||||||
|
If(self.rtlink.o.address == 1, oe_k.eq(self.rtlink.o.data[0])),
|
||||||
|
),
|
||||||
|
If(override_en,
|
||||||
|
ts.o.eq(override_o),
|
||||||
|
ts.oe.eq(override_oe)
|
||||||
|
).Else(
|
||||||
|
ts.o.eq(o_k),
|
||||||
|
ts.oe.eq(oe_k)
|
||||||
)
|
)
|
||||||
|
]
|
||||||
self.sync.rio += If(self.rtlink.o.stb & (self.rtlink.o.address == 2),
|
self.sync.rio += If(self.rtlink.o.stb & (self.rtlink.o.address == 2),
|
||||||
sensitivity.eq(self.rtlink.o.data))
|
sensitivity.eq(self.rtlink.o.data))
|
||||||
|
|
||||||
|
@ -43,3 +73,31 @@ class Inout(Module):
|
||||||
),
|
),
|
||||||
self.rtlink.i.data.eq(i)
|
self.rtlink.i.data.eq(i)
|
||||||
]
|
]
|
||||||
|
|
||||||
|
self.probes += [i, ts.oe]
|
||||||
|
|
||||||
|
|
||||||
|
class ClockGen(Module):
|
||||||
|
def __init__(self, pad, ftw_width=24):
|
||||||
|
self.rtlink = rtlink.Interface(
|
||||||
|
rtlink.OInterface(ftw_width, suppress_nop=False))
|
||||||
|
|
||||||
|
# # #
|
||||||
|
|
||||||
|
ftw = Signal(ftw_width)
|
||||||
|
acc = Signal(ftw_width)
|
||||||
|
self.sync.rio += If(self.rtlink.o.stb, ftw.eq(self.rtlink.o.data))
|
||||||
|
self.sync.rio_phy += [
|
||||||
|
acc.eq(acc + ftw),
|
||||||
|
# rtlink takes precedence over regular acc increments
|
||||||
|
If(self.rtlink.o.stb,
|
||||||
|
If(self.rtlink.o.data != 0,
|
||||||
|
# known phase on frequency write: at rising edge
|
||||||
|
acc.eq(2**(ftw_width - 1))
|
||||||
|
).Else(
|
||||||
|
# set output to 0 on stop
|
||||||
|
acc.eq(0)
|
||||||
|
)
|
||||||
|
),
|
||||||
|
pad.eq(acc[-1])
|
||||||
|
]
|
||||||
|
|
|
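The ClockGen PHY above is a phase accumulator: the pad carries the MSB of an accumulator that advances by the frequency tuning word every rio_phy cycle, so f_out = ftw / 2**ftw_width * f_rio_phy, with a known phase on each frequency write. A hedged software model follows; clockgen_model is an illustration, not ARTIQ code.

# Software model of the phase-accumulator clock generator above.
def clockgen_model(ftw, ftw_width=24, cycles=8):
    acc = 2**(ftw_width - 1)                    # phase after a frequency write
    levels = []
    for _ in range(cycles):
        levels.append(acc >> (ftw_width - 1))   # pad = MSB of the accumulator
        acc = (acc + ftw) % 2**ftw_width
    return levels

print(clockgen_model(2**23))  # ftw = half the accumulator range -> output at f_clk/2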
@@ -19,8 +19,9 @@ class AMPSoC:
 
         self.submodules.timer0 = timer.Timer(width=64)
 
-        self.submodules.kernel_cpu = amp.KernelCPU(
-            self.platform, self.sdram.crossbar.get_master())
+        self.submodules.kernel_cpu = amp.KernelCPU(self.platform)
+        self.add_wb_sdram_if(self.kernel_cpu.wb_sdram)
 
         self.submodules.mailbox = amp.Mailbox()
         self.add_wb_slave(mem_decoder(self.mem_map["mailbox"]),
                           self.mailbox.i1)
@ -0,0 +1,130 @@
|
||||||
|
from collections import OrderedDict
|
||||||
|
|
||||||
|
from quamash import QtGui
|
||||||
|
import pyqtgraph as pg
|
||||||
|
from pyqtgraph import dockarea
|
||||||
|
|
||||||
|
|
||||||
|
class _SimpleSettings(QtGui.QDialog):
|
||||||
|
def __init__(self, parent, prev_name, prev_settings,
|
||||||
|
result_list, create_cb):
|
||||||
|
QtGui.QDialog.__init__(self, parent=parent)
|
||||||
|
self.setWindowTitle(self._window_title)
|
||||||
|
|
||||||
|
grid = QtGui.QGridLayout()
|
||||||
|
self.setLayout(grid)
|
||||||
|
|
||||||
|
grid.addWidget(QtGui.QLabel("Name:"), 0, 0)
|
||||||
|
self.name = name = QtGui.QLineEdit()
|
||||||
|
grid.addWidget(name, 0, 1)
|
||||||
|
if prev_name is not None:
|
||||||
|
name.insert(prev_name)
|
||||||
|
|
||||||
|
grid.addWidget(QtGui.QLabel("Result:"))
|
||||||
|
self.result = result = QtGui.QComboBox()
|
||||||
|
grid.addWidget(result, 1, 1)
|
||||||
|
result.addItems(result_list)
|
||||||
|
result.setEditable(True)
|
||||||
|
if "result" in prev_settings:
|
||||||
|
result.setEditText(prev_settings["result"])
|
||||||
|
|
||||||
|
buttons = QtGui.QDialogButtonBox(
|
||||||
|
QtGui.QDialogButtonBox.Ok | QtGui.QDialogButtonBox.Cancel)
|
||||||
|
grid.addWidget(buttons, 2, 0, 1, 2)
|
||||||
|
buttons.accepted.connect(self.accept)
|
||||||
|
buttons.rejected.connect(self.reject)
|
||||||
|
|
||||||
|
def on_accept():
|
||||||
|
create_cb(name.text(), {"result": result.currentText()})
|
||||||
|
self.accepted.connect(on_accept)
|
||||||
|
|
||||||
|
def accept(self):
|
||||||
|
if self.name.text() and self.result.currentText():
|
||||||
|
QtGui.QDialog.accept(self)
|
||||||
|
|
||||||
|
|
||||||
|
class NumberDisplaySettings(_SimpleSettings):
|
||||||
|
_window_title = "Number display"
|
||||||
|
|
||||||
|
|
||||||
|
class NumberDisplay(dockarea.Dock):
|
||||||
|
def __init__(self, name, settings):
|
||||||
|
dockarea.Dock.__init__(self, "Display: " + name, size=(250, 250),
|
||||||
|
closable=True)
|
||||||
|
self.settings = settings
|
||||||
|
self.number = QtGui.QLCDNumber()
|
||||||
|
self.number.setDigitCount(10)
|
||||||
|
self.addWidget(self.number)
|
||||||
|
|
||||||
|
def data_sources(self):
|
||||||
|
return {self.settings["result"]}
|
||||||
|
|
||||||
|
def update_data(self, data):
|
||||||
|
result = self.settings["result"]
|
||||||
|
try:
|
||||||
|
n = float(data[result])
|
||||||
|
except:
|
||||||
|
n = "---"
|
||||||
|
self.number.display(n)
|
||||||
|
|
||||||
|
|
||||||
|
class XYDisplaySettings(_SimpleSettings):
|
||||||
|
_window_title = "XY plot"
|
||||||
|
|
||||||
|
|
||||||
|
class XYDisplay(dockarea.Dock):
|
||||||
|
def __init__(self, name, settings):
|
||||||
|
dockarea.Dock.__init__(self, "XY: " + name, size=(640, 480),
|
||||||
|
closable=True)
|
||||||
|
self.settings = settings
|
||||||
|
self.plot = pg.PlotWidget()
|
||||||
|
self.addWidget(self.plot)
|
||||||
|
|
||||||
|
def data_sources(self):
|
||||||
|
return {self.settings["result"]}
|
||||||
|
|
||||||
|
def update_data(self, data):
|
||||||
|
result = self.settings["result"]
|
||||||
|
try:
|
||||||
|
y = data[result]
|
||||||
|
except KeyError:
|
||||||
|
return
|
||||||
|
self.plot.clear()
|
||||||
|
if not y:
|
||||||
|
return
|
||||||
|
self.plot.plot(y)
|
||||||
|
|
||||||
|
|
||||||
|
class HistogramDisplaySettings(_SimpleSettings):
|
||||||
|
_window_title = "Histogram"
|
||||||
|
|
||||||
|
|
||||||
|
class HistogramDisplay(dockarea.Dock):
|
||||||
|
def __init__(self, name, settings):
|
||||||
|
dockarea.Dock.__init__(self, "Histogram: " + name, size=(640, 480),
|
||||||
|
closable=True)
|
||||||
|
self.settings = settings
|
||||||
|
self.plot = pg.PlotWidget()
|
||||||
|
self.addWidget(self.plot)
|
||||||
|
|
||||||
|
def data_sources(self):
|
||||||
|
return {self.settings["result"]}
|
||||||
|
|
||||||
|
def update_data(self, data):
|
||||||
|
result = self.settings["result"]
|
||||||
|
try:
|
||||||
|
y = data[result]
|
||||||
|
except KeyError:
|
||||||
|
return
|
||||||
|
x = list(range(len(y)+1))
|
||||||
|
self.plot.clear()
|
||||||
|
if not y:
|
||||||
|
return
|
||||||
|
self.plot.plot(x, y, stepMode=True, fillLevel=0, brush=(0, 0, 255, 150))
|
||||||
|
|
||||||
|
|
||||||
|
display_types = OrderedDict([
|
||||||
|
("Number", (NumberDisplaySettings, NumberDisplay)),
|
||||||
|
("XY", (XYDisplaySettings, XYDisplay)),
|
||||||
|
("Histogram", (HistogramDisplaySettings, HistogramDisplay))
|
||||||
|
])
|
|
@@ -1,11 +1,14 @@
 import asyncio
+import traceback

 from quamash import QtGui, QtCore
 from pyqtgraph import dockarea
 from pyqtgraph import LayoutWidget

 from artiq.protocols.sync_struct import Subscriber
+from artiq.protocols import pyon
 from artiq.gui.tools import DictSyncModel
+from artiq.gui.scan import ScanController


 class _ExplistModel(DictSyncModel):
@@ -21,46 +24,175 @@ class _ExplistModel(DictSyncModel):
         return k


-class ExplorerDock(dockarea.Dock):
-    def __init__(self, status_bar, schedule_ctl):
-        dockarea.Dock.__init__(self, "Explorer", size=(1100, 400))
+class _FreeValueEntry(QtGui.QLineEdit):
+    def __init__(self, procdesc):
+        QtGui.QLineEdit.__init__(self)
+        if "default" in procdesc:
+            self.insert(pyon.encode(procdesc["default"]))
+
+    def get_argument_value(self):
+        return pyon.decode(self.text())
+
+
+class _BooleanEntry(QtGui.QCheckBox):
+    def __init__(self, procdesc):
+        QtGui.QCheckBox.__init__(self)
+        if "default" in procdesc:
+            self.setChecked(procdesc["default"])
+
+    def get_argument_value(self):
+        return self.isChecked()
+
+
+class _EnumerationEntry(QtGui.QComboBox):
+    def __init__(self, procdesc):
+        QtGui.QComboBox.__init__(self)
+        self.choices = procdesc["choices"]
+        self.addItems(self.choices)
+        if "default" in procdesc:
+            try:
+                idx = self.choices.index(procdesc["default"])
+            except:
+                pass
+            else:
+                self.setCurrentIndex(idx)
+
+    def get_argument_value(self):
+        return self.choices[self.currentIndex()]
+
+
+class _NumberEntry(QtGui.QDoubleSpinBox):
+    def __init__(self, procdesc):
+        QtGui.QDoubleSpinBox.__init__(self)
+        if procdesc["step"] is not None:
+            self.setSingleStep(procdesc["step"])
+        if procdesc["min"] is not None:
+            self.setMinimum(procdesc["min"])
+        if procdesc["max"] is not None:
+            self.setMaximum(procdesc["max"])
+        if procdesc["unit"]:
+            self.setSuffix(" " + procdesc["unit"])
+        if "default" in procdesc:
+            self.setValue(procdesc["default"])
+
+    def get_argument_value(self):
+        return self.value()
+
+
+class _StringEntry(QtGui.QLineEdit):
+    def __init__(self, procdesc):
+        QtGui.QLineEdit.__init__(self)
+        if "default" in procdesc:
+            self.insert(procdesc["default"])
+
+    def get_argument_value(self):
+        return self.text()
+
+
+_procty_to_entry = {
+    "FreeValue": _FreeValueEntry,
+    "BooleanValue": _BooleanEntry,
+    "EnumerationValue": _EnumerationEntry,
+    "NumberValue": _NumberEntry,
+    "StringValue": _StringEntry,
+    "Scannable": ScanController
+}
+
+
+class _ArgumentSetter(LayoutWidget):
+    def __init__(self, dialog_parent, arguments):
+        LayoutWidget.__init__(self)
+        self.dialog_parent = dialog_parent
+
+        if not arguments:
+            self.addWidget(QtGui.QLabel("No arguments"), 0, 0)
+
+        self._args_to_entries = dict()
+        for n, (name, procdesc) in enumerate(arguments):
+            self.addWidget(QtGui.QLabel(name), n, 0)
+            entry = _procty_to_entry[procdesc["ty"]](procdesc)
+            self.addWidget(entry, n, 1)
+            self._args_to_entries[name] = entry
+
+    def get_argument_values(self):
+        r = dict()
+        for arg, entry in self._args_to_entries.items():
+            try:
+                r[arg] = entry.get_argument_value()
+            except:
+                msgbox = QtGui.QMessageBox(self.dialog_parent)
+                msgbox.setWindowTitle("Error")
+                msgbox.setText("Failed to obtain value for argument '{}'.\n{}"
+                               .format(arg, traceback.format_exc()))
+                msgbox.setStandardButtons(QtGui.QMessageBox.Ok)
+                msgbox.show()
+                return None
+        return r
+
+
+class ExplorerDock(dockarea.Dock):
+    def __init__(self, dialog_parent, status_bar, schedule_ctl):
+        dockarea.Dock.__init__(self, "Explorer", size=(1500, 500))
+
+        self.dialog_parent = dialog_parent
         self.status_bar = status_bar
         self.schedule_ctl = schedule_ctl

-        splitter = QtGui.QSplitter(QtCore.Qt.Horizontal)
-        self.addWidget(splitter)
+        self.splitter = QtGui.QSplitter(QtCore.Qt.Horizontal)
+        self.addWidget(self.splitter)

         grid = LayoutWidget()
-        splitter.addWidget(grid)
+        self.splitter.addWidget(grid)

         self.el = QtGui.QListView()
+        self.el.selectionChanged = self.update_argsetter
         grid.addWidget(self.el, 0, 0, colspan=4)

         self.datetime = QtGui.QDateTimeEdit()
         self.datetime.setDisplayFormat("MMM d yyyy hh:mm:ss")
         self.datetime.setCalendarPopup(True)
         self.datetime.setDate(QtCore.QDate.currentDate())
-        self.datetime_en = QtGui.QCheckBox("Set due date:")
+        self.datetime.dateTimeChanged.connect(self.enable_duedate)
+        self.datetime_en = QtGui.QCheckBox("Due date:")
         grid.addWidget(self.datetime_en, 1, 0)
-        grid.addWidget(self.datetime, 1, 1, colspan=3)
+        grid.addWidget(self.datetime, 1, 1)

-        self.pipeline = QtGui.QLineEdit()
-        self.pipeline.insert("main")
-        grid.addLabel("Pipeline:", 2, 0)
-        grid.addWidget(self.pipeline, 2, 1)
-
         self.priority = QtGui.QSpinBox()
         self.priority.setRange(-99, 99)
-        grid.addLabel("Priority:", 2, 2)
-        grid.addWidget(self.priority, 2, 3)
+        grid.addWidget(QtGui.QLabel("Priority:"), 1, 2)
+        grid.addWidget(self.priority, 1, 3)
+
+        self.pipeline = QtGui.QLineEdit()
+        self.pipeline.insert("main")
+        grid.addWidget(QtGui.QLabel("Pipeline:"), 2, 0)
+        grid.addWidget(self.pipeline, 2, 1)
+
+        self.flush = QtGui.QCheckBox("Flush")
+        grid.addWidget(self.flush, 2, 2, colspan=2)

         submit = QtGui.QPushButton("Submit")
         grid.addWidget(submit, 3, 0, colspan=4)
         submit.clicked.connect(self.submit_clicked)

-        placeholder = QtGui.QWidget()
-        splitter.addWidget(placeholder)
+        self.argsetter = _ArgumentSetter(self.dialog_parent, [])
+        self.splitter.addWidget(self.argsetter)
+        self.splitter.setSizes([grid.minimumSizeHint().width(), 1000])
+
+    def update_argsetter(self, selected, deselected):
+        selected = selected.indexes()
+        if selected:
+            row = selected[0].row()
+            key = self.explist_model.row_to_key[row]
+            expinfo = self.explist_model.backing_store[key]
+            arguments = expinfo["arguments"]
+            sizes = self.splitter.sizes()
+            self.argsetter.deleteLater()
+            self.argsetter = _ArgumentSetter(self.dialog_parent, arguments)
+            self.splitter.insertWidget(1, self.argsetter)
+            self.splitter.setSizes(sizes)
+
+    def enable_duedate(self):
+        self.datetime_en.setChecked(True)

     @asyncio.coroutine
     def sub_connect(self, host, port):
@@ -78,15 +210,15 @@ class ExplorerDock(dockarea.Dock):
         return self.explist_model

     @asyncio.coroutine
-    def submit(self, pipeline_name, file, experiment, arguments,
-               priority, due_date):
+    def submit(self, pipeline_name, file, class_name, arguments,
+               priority, due_date, flush):
         expid = {
             "file": file,
-            "experiment": experiment,
+            "class_name": class_name,
             "arguments": arguments,
         }
         rid = yield from self.schedule_ctl.submit(pipeline_name, expid,
-                                                  priority, due_date)
+                                                  priority, due_date, flush)
         self.status_bar.showMessage("Submitted RID {}".format(rid))

     def submit_clicked(self):
@@ -94,11 +226,15 @@ class ExplorerDock(dockarea.Dock):
         if idx:
             row = idx[0].row()
             key = self.explist_model.row_to_key[row]
-            expinfo = self.explist_model.data[key]
+            expinfo = self.explist_model.backing_store[key]
             if self.datetime_en.isChecked():
                 due_date = self.datetime.dateTime().toMSecsSinceEpoch()/1000
             else:
                 due_date = None
+            arguments = self.argsetter.get_argument_values()
+            if arguments is None:
+                return
             asyncio.async(self.submit(self.pipeline.text(),
-                                      expinfo["file"], expinfo["experiment"],
-                                      dict(), self.priority.value(), due_date))
+                                      expinfo["file"], expinfo["class_name"],
+                                      arguments, self.priority.value(),
+                                      due_date, self.flush.isChecked()))
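For orientation, a hedged sketch of the argument descriptions ("procdesc" dicts) that _ArgumentSetter dispatches on through _procty_to_entry; only the "ty" values and the keys read by the entry widgets come from the code above, the concrete values are made up:

# Illustrative argument list as _ArgumentSetter would receive it.
arguments = [
    ("count",   {"ty": "NumberValue", "default": 100,
                 "min": 0, "max": 1000, "step": 1, "unit": ""}),
    ("enable",  {"ty": "BooleanValue", "default": True}),
    ("mode",    {"ty": "EnumerationValue", "choices": ["fast", "slow"],
                 "default": "fast"}),
    ("comment", {"ty": "StringValue", "default": ""}),
]
# For each (name, procdesc) pair the dock builds
# _procty_to_entry[procdesc["ty"]](procdesc) and later collects a plain
# {name: value} dict from get_argument_values() for submission.
for name, procdesc in arguments:
    print(name, procdesc["ty"])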
Binary file not shown (new image, 13 KiB).
@@ -1,7 +1,57 @@
-from quamash import QtGui
+import asyncio
+
+from quamash import QtGui, QtCore
 from pyqtgraph import dockarea
+
+from artiq.protocols.sync_struct import Subscriber
+from artiq.gui.tools import ListSyncModel
+
+
+class _LogModel(ListSyncModel):
+    def __init__(self, parent, init):
+        ListSyncModel.__init__(self,
+            ["RID", "Message"],
+            parent, init)
+
+    def convert(self, v, column):
+        return v[column]


 class LogDock(dockarea.Dock):
     def __init__(self):
         dockarea.Dock.__init__(self, "Log", size=(1000, 300))
+
+        self.log = QtGui.QTableView()
+        self.log.setSelectionMode(QtGui.QAbstractItemView.NoSelection)
+        self.log.horizontalHeader().setResizeMode(
+            QtGui.QHeaderView.ResizeToContents)
+        self.log.setHorizontalScrollMode(
+            QtGui.QAbstractItemView.ScrollPerPixel)
+        self.log.setShowGrid(False)
+        self.log.setTextElideMode(QtCore.Qt.ElideNone)
+        self.addWidget(self.log)
+        self.scroll_at_bottom = False
+
+    @asyncio.coroutine
+    def sub_connect(self, host, port):
+        self.subscriber = Subscriber("log", self.init_log_model)
+        yield from self.subscriber.connect(host, port)
+
+    @asyncio.coroutine
+    def sub_close(self):
+        yield from self.subscriber.close()
+
+    def rows_inserted_before(self):
+        scrollbar = self.log.verticalScrollBar()
+        self.scroll_at_bottom = scrollbar.value() == scrollbar.maximum()
+
+    def rows_inserted_after(self):
+        if self.scroll_at_bottom:
+            self.log.scrollToBottom()
+
+    def init_log_model(self, init):
+        table_model = _LogModel(self.log, init)
+        self.log.setModel(table_model)
+        table_model.rowsAboutToBeInserted.connect(self.rows_inserted_before)
+        table_model.rowsInserted.connect(self.rows_inserted_after)
+        return table_model
@@ -0,0 +1,300 @@ (new file)
import asyncio
import logging
import socket
import struct
from operator import itemgetter

from quamash import QtGui, QtCore
from pyqtgraph import dockarea

from artiq.tools import TaskObject
from artiq.protocols.sync_struct import Subscriber


logger = logging.getLogger(__name__)


_mode_enc = {
    "exp": 0,
    "1": 1,
    "0": 2,
    "in": 3
}


class _TTLWidget(QtGui.QFrame):
    def __init__(self, send_to_device, channel, force_out, name):
        self.send_to_device = send_to_device
        self.channel = channel
        self.force_out = force_out

        QtGui.QFrame.__init__(self)

        self.setFrameShape(QtGui.QFrame.Panel)
        self.setFrameShadow(QtGui.QFrame.Raised)

        grid = QtGui.QGridLayout()
        self.setLayout(grid)
        label = QtGui.QLabel(name)
        label.setAlignment(QtCore.Qt.AlignCenter)
        grid.addWidget(label, 1, 1)

        self._direction = QtGui.QLabel()
        self._direction.setAlignment(QtCore.Qt.AlignCenter)
        grid.addWidget(self._direction, 2, 1)
        self._override = QtGui.QLabel()
        self._override.setAlignment(QtCore.Qt.AlignCenter)
        grid.addWidget(self._override, 3, 1)
        self._value = QtGui.QLabel()
        self._value.setAlignment(QtCore.Qt.AlignCenter)
        grid.addWidget(self._value, 4, 1, 6, 1)

        self._value.setContextMenuPolicy(QtCore.Qt.ActionsContextMenu)
        menu = QtGui.QActionGroup(self._value)
        menu.setExclusive(True)
        self._expctl_action = QtGui.QAction("Experiment controlled", self._value)
        self._expctl_action.setCheckable(True)
        menu.addAction(self._expctl_action)
        self._value.addAction(self._expctl_action)
        self._expctl_action.triggered.connect(lambda: self.set_mode("exp"))
        separator = QtGui.QAction(self._value)
        separator.setSeparator(True)
        self._value.addAction(separator)
        self._force1_action = QtGui.QAction("Force 1", self._value)
        self._force1_action.setCheckable(True)
        menu.addAction(self._force1_action)
        self._value.addAction(self._force1_action)
        self._force1_action.triggered.connect(lambda: self.set_mode("1"))
        self._force0_action = QtGui.QAction("Force 0", self._value)
        self._force0_action.setCheckable(True)
        menu.addAction(self._force0_action)
        self._value.addAction(self._force0_action)
        self._force0_action.triggered.connect(lambda: self.set_mode("0"))
        self._forcein_action = QtGui.QAction("Force input", self._value)
        self._forcein_action.setCheckable(True)
        self._forcein_action.setEnabled(not force_out)
        menu.addAction(self._forcein_action)
        self._value.addAction(self._forcein_action)
        self._forcein_action.triggered.connect(lambda: self.set_mode("in"))

        self.set_value(0, False, False)

    def set_mode(self, mode):
        data = struct.pack("bbb",
                           2,  # MONINJ_REQ_TTLSET
                           self.channel, _mode_enc[mode])
        self.send_to_device(data)

    def set_value(self, value, oe, override):
        value_s = "1" if value else "0"
        if override:
            value_s = "<b>" + value_s + "</b>"
            color = " color=\"red\""
            self._override.setText("<font size=\"1\" color=\"red\">OVERRIDE</font>")
        else:
            color = ""
            self._override.setText("")
        self._value.setText("<font size=\"9\"{}>{}</font>".format(
            color, value_s))
        oe = oe or self.force_out
        direction = "OUT" if oe else "IN"
        self._direction.setText("<font size=\"1\">" + direction + "</font>")
        if override:
            if oe:
                if value:
                    self._force1_action.setChecked(True)
                else:
                    self._force0_action.setChecked(True)
            else:
                self._forcein_action.setChecked(True)
        else:
            self._expctl_action.setChecked(True)


class _DDSWidget(QtGui.QFrame):
    def __init__(self, send_to_device, channel, sysclk, name):
        self.send_to_device = send_to_device
        self.channel = channel
        self.sysclk = sysclk
        self.name = name

        QtGui.QFrame.__init__(self)

        self.setFrameShape(QtGui.QFrame.Panel)
        self.setFrameShadow(QtGui.QFrame.Raised)

        grid = QtGui.QGridLayout()
        self.setLayout(grid)
        label = QtGui.QLabel(name)
        label.setAlignment(QtCore.Qt.AlignCenter)
        grid.addWidget(label, 1, 1)

        self._value = QtGui.QLabel()
        self._value.setAlignment(QtCore.Qt.AlignCenter)
        grid.addWidget(self._value, 2, 1, 6, 1)

        self.set_value(0)

    def set_value(self, ftw):
        frequency = ftw*self.sysclk/2**32
        self._value.setText("<font size=\"6\">{:.7f} MHz</font>"
                            .format(float(frequency)/1e6))


class _DeviceManager:
    def __init__(self, send_to_device, init):
        self.send_to_device = send_to_device
        self.ddb = dict()
        self.ttl_cb = lambda: None
        self.ttl_widgets = dict()
        self.dds_cb = lambda: None
        self.dds_widgets = dict()
        for k, v in init.items():
            self[k] = v

    def __setitem__(self, k, v):
        self.ddb[k] = v
        if k in self.ttl_widgets:
            del self[k]
        if not isinstance(v, dict):
            return
        try:
            if v["type"] == "local":
                if v["module"] == "artiq.coredevice.ttl":
                    channel = v["arguments"]["channel"]
                    force_out = v["class"] == "TTLOut"
                    self.ttl_widgets[channel] = _TTLWidget(
                        self.send_to_device, channel, force_out, k)
                    self.ttl_cb()
                if (v["module"] == "artiq.coredevice.dds"
                        and v["class"] in {"AD9858", "AD9914"}):
                    channel = v["arguments"]["channel"]
                    sysclk = v["arguments"]["sysclk"]
                    self.dds_widgets[channel] = _DDSWidget(
                        self.send_to_device, channel, sysclk, k)
                    self.dds_cb()
        except KeyError:
            pass

    def __delitem__(self, k):
        del self.ddb[k]
        if k in self.ttl_widgets:
            del self.ttl_widgets[k]
            self.ttl_cb()

    def get_core_addr(self):
        try:
            comm = self.ddb["comm"]
            while isinstance(comm, str):
                comm = self.ddb[comm]
            return comm["arguments"]["host"]
        except KeyError:
            return None


class _MonInjDock(dockarea.Dock):
    def __init__(self, name):
        dockarea.Dock.__init__(self, name, size=(1500, 500))

        self.grid = QtGui.QGridLayout()
        gridw = QtGui.QWidget()
        gridw.setLayout(self.grid)
        self.addWidget(gridw)

    def layout_widgets(self, widgets):
        w = self.grid.itemAt(0)
        while w is not None:
            self.grid.removeItem(w)
            w = self.grid.itemAt(0)
        for i, (_, w) in enumerate(sorted(widgets, key=itemgetter(0))):
            self.grid.addWidget(w, i // 4, i % 4)


class MonInj(TaskObject):
    def __init__(self):
        self.ttl_dock = _MonInjDock("TTL")
        self.dds_dock = _MonInjDock("DDS")

        self.subscriber = Subscriber("devices", self.init_devices)
        self.dm = _DeviceManager(self.send_to_device, dict())
        self.transport = None

    @asyncio.coroutine
    def start(self, server, port):
        loop = asyncio.get_event_loop()
        yield from loop.create_datagram_endpoint(lambda: self,
                                                 family=socket.AF_INET)
        try:
            yield from self.subscriber.connect(server, port)
            try:
                TaskObject.start(self)
            except:
                yield from self.subscriber.close()
                raise
        except:
            self.transport.close()
            raise

    @asyncio.coroutine
    def stop(self):
        yield from TaskObject.stop(self)
        yield from self.subscriber.close()
        if self.transport is not None:
            self.transport.close()
            self.transport = None

    def connection_made(self, transport):
        self.transport = transport

    def datagram_received(self, data, addr):
        try:
            ttl_levels, ttl_oes, ttl_overrides = \
                struct.unpack(">QQQ", data[:8*3])
            for channel, w in self.dm.ttl_widgets.items():
                w.set_value(ttl_levels & (1 << channel),
                            ttl_oes & (1 << channel),
                            ttl_overrides & (1 << channel))
            dds_data = data[8*3:]
            ndds = len(dds_data)//4
            ftws = struct.unpack(">" + "I"*ndds, dds_data)
            for channel, w in self.dm.dds_widgets.items():
                try:
                    ftw = ftws[channel]
                except KeyError:
                    pass
                else:
                    w.set_value(ftw)
        except:
            logger.warning("failed to process datagram", exc_info=True)

    def error_received(self, exc):
        logger.warning("datagram endpoint error")

    def connection_lost(self, exc):
        self.transport = None

    def send_to_device(self, data):
        ca = self.dm.get_core_addr()
        if ca is None:
            logger.warning("could not find core device address")
        elif self.transport is None:
            logger.warning("datagram endpoint not available")
        else:
            self.transport.sendto(data, (ca, 3250))

    @asyncio.coroutine
    def _do(self):
        while True:
            yield from asyncio.sleep(0.2)
            # MONINJ_REQ_MONITOR
            self.send_to_device(b"\x01")

    def init_devices(self, d):
        self.dm = _DeviceManager(self.send_to_device, d)
        self.dm.ttl_cb = lambda: self.ttl_dock.layout_widgets(
                            self.dm.ttl_widgets.items())
        self.dm.dds_cb = lambda: self.dds_dock.layout_widgets(
                            self.dm.dds_widgets.items())
        self.dm.ttl_cb()
        self.dm.dds_cb()
        return self.dm
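A hedged, standalone sketch of the monitor datagram layout that datagram_received unpacks: three big-endian 64-bit words (TTL levels, output enables, overrides) followed by one 32-bit DDS frequency tuning word per channel. The sample values and the 1 GHz sysclk are assumptions for illustration.

import struct

# Fake monitor packet: TTL channel 3 high, all outputs enabled, no overrides,
# and two DDS channels with arbitrary frequency tuning words.
packet = struct.pack(">QQQ", 1 << 3, 0xffffffffffffffff, 0)
packet += struct.pack(">II", 0x12345678, 0x0abcdef0)

ttl_levels, ttl_oes, ttl_overrides = struct.unpack(">QQQ", packet[:8*3])
dds_data = packet[8*3:]
ftws = struct.unpack(">" + "I"*(len(dds_data)//4), dds_data)

sysclk = 1e9                                   # assumed DDS system clock, in Hz
print(bool(ttl_levels & (1 << 3)))             # -> True (channel 3 is high)
print(ftws[0]*sysclk/2**32/1e6, "MHz")         # frequency as _DDSWidget formats it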
@@ -1,10 +1,11 @@
 import asyncio

-from quamash import QtGui
+from quamash import QtGui, QtCore
 from pyqtgraph import dockarea
+from pyqtgraph import LayoutWidget

 from artiq.protocols.sync_struct import Subscriber
-from artiq.gui.tools import DictSyncModel
+from artiq.gui.tools import DictSyncModel, short_format


 class ParametersModel(DictSyncModel):
@@ -19,18 +20,41 @@ class ParametersModel(DictSyncModel):
         if column == 0:
             return k
         elif column == 1:
-            return str(v)
+            return short_format(v)
         else:
             raise ValueError


 class ParametersDock(dockarea.Dock):
     def __init__(self):
-        dockarea.Dock.__init__(self, "Parameters", size=(500, 300))
+        dockarea.Dock.__init__(self, "Parameters", size=(400, 300))
+
+        grid = LayoutWidget()
+        self.addWidget(grid)
+
+        self.search = QtGui.QLineEdit()
+        self.search.setPlaceholderText("search...")
+        self.search.editingFinished.connect(self.search_parameters)
+        grid.addWidget(self.search, 0, 0)

         self.table = QtGui.QTableView()
         self.table.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
-        self.addWidget(self.table)
+        self.table.horizontalHeader().setResizeMode(
+            QtGui.QHeaderView.ResizeToContents)
+        grid.addWidget(self.table, 1, 0)
+
+    def search_parameters(self):
+        model = self.table.model()
+        parentIndex = model.index(0, 0)
+        numRows = model.rowCount(parentIndex)
+
+        for row in range(numRows):
+            index = model.index(row, 0)
+            parameter = model.data(index, QtCore.Qt.DisplayRole)
+            if parameter.startswith(self.search.displayText()):
+                self.table.showRow(row)
+            else:
+                self.table.hideRow(row)

     @asyncio.coroutine
     def sub_connect(self, host, port):
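The filtering rule used by search_parameters reduces to a prefix match on the parameter name; a tiny pure-Python illustration (parameter names are made up):

parameters = ["flow_rate", "flop_delay", "trap_depth"]
query = "flo"
visible = [p for p in parameters if p.startswith(query)]
print(visible)   # -> ['flow_rate', 'flop_delay']; the other rows would be hidden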
@@ -0,0 +1,109 @@ (new file)
import asyncio
from collections import OrderedDict
from functools import partial

from quamash import QtGui, QtCore
from pyqtgraph import dockarea
from pyqtgraph import LayoutWidget

from artiq.protocols.sync_struct import Subscriber
from artiq.gui.tools import DictSyncModel, short_format
from artiq.gui.displays import *


class ResultsModel(DictSyncModel):
    def __init__(self, parent, init):
        DictSyncModel.__init__(self, ["Result", "Value"],
                               parent, init)

    def sort_key(self, k, v):
        return k

    def convert(self, k, v, column):
        if column == 0:
            return k
        elif column == 1:
            return short_format(v)
        else:
            raise ValueError


class ResultsDock(dockarea.Dock):
    def __init__(self, dialog_parent, dock_area):
        dockarea.Dock.__init__(self, "Results", size=(1500, 500))
        self.dialog_parent = dialog_parent
        self.dock_area = dock_area

        grid = LayoutWidget()
        self.addWidget(grid)

        self.table = QtGui.QTableView()
        self.table.setSelectionMode(QtGui.QAbstractItemView.NoSelection)
        self.table.horizontalHeader().setResizeMode(
            QtGui.QHeaderView.ResizeToContents)
        grid.addWidget(self.table, 0, 0)

        add_display_box = QtGui.QGroupBox("Add display")
        grid.addWidget(add_display_box, 0, 1)
        display_grid = QtGui.QGridLayout()
        add_display_box.setLayout(display_grid)

        for n, name in enumerate(display_types.keys()):
            btn = QtGui.QPushButton(name)
            display_grid.addWidget(btn, n, 0)
            btn.clicked.connect(partial(self.create_dialog, name))

        self.displays = dict()

    @asyncio.coroutine
    def sub_connect(self, host, port):
        self.subscriber = Subscriber("rt_results", self.init_results_model,
                                     self.on_mod)
        yield from self.subscriber.connect(host, port)

    @asyncio.coroutine
    def sub_close(self):
        yield from self.subscriber.close()

    def init_results_model(self, init):
        self.table_model = ResultsModel(self.table, init)
        self.table.setModel(self.table_model)
        return self.table_model

    def on_mod(self, mod):
        if mod["action"] == "init":
            for display in self.displays.values():
                display.update_data(self.table_model.backing_store)
            return

        if mod["action"] == "setitem":
            source = mod["key"]
        elif mod["path"]:
            source = mod["path"][0]
        else:
            return

        for display in self.displays.values():
            if source in display.data_sources():
                display.update_data(self.table_model.backing_store)

    def create_dialog(self, ty):
        dlg_class = display_types[ty][0]
        dlg = dlg_class(self.dialog_parent, None, dict(),
                        sorted(self.table_model.backing_store.keys()),
                        partial(self.create_display, ty, None))
        dlg.open()

    def create_display(self, ty, prev_name, name, settings):
        if prev_name is not None and prev_name in self.displays:
            raise NotImplementedError
        dsp_class = display_types[ty][1]
        dsp = dsp_class(name, settings)
        self.displays[name] = dsp
        dsp.update_data(self.table_model.backing_store)

        def on_close():
            del self.displays[name]
        dsp.sigClosed.connect(on_close)
        self.dock_area.addDock(dsp)
        self.dock_area.floatDock(dsp)
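on_mod above keys its refresh decision on the shape of sync_struct mod dictionaries; a hedged sketch of the shapes it distinguishes (field names other than "action", "key" and "path" are illustrative assumptions):

# Illustrative mod dictionaries.
mod_setitem = {"action": "setitem", "key": "photon_count", "value": 42}
mod_nested = {"action": "append", "path": ["trace"], "x": 0.5}

for mod in (mod_setitem, mod_nested):
    source = mod["key"] if mod["action"] == "setitem" else mod["path"][0]
    print(source)   # -> photon_count, then trace; displays watching that
                    #    source get update_data() called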
@@ -0,0 +1,137 @@ (new file)
from quamash import QtGui
from pyqtgraph import LayoutWidget


class _Range(LayoutWidget):
    def __init__(self, global_min, global_max, global_step, unit):
        LayoutWidget.__init__(self)

        def apply_properties(spinbox):
            if global_min is not None:
                spinbox.setMinimum(global_min)
            if global_max is not None:
                spinbox.setMaximum(global_max)
            if global_step is not None:
                spinbox.setSingleStep(global_step)
            if unit:
                spinbox.setSuffix(" " + unit)

        self.addWidget(QtGui.QLabel("Min:"), 0, 0)
        self.min = QtGui.QDoubleSpinBox()
        apply_properties(self.min)
        self.addWidget(self.min, 0, 1)

        self.addWidget(QtGui.QLabel("Max:"), 0, 2)
        self.max = QtGui.QDoubleSpinBox()
        apply_properties(self.max)
        self.addWidget(self.max, 0, 3)

        self.addWidget(QtGui.QLabel("#Points:"), 0, 4)
        self.npoints = QtGui.QSpinBox()
        self.npoints.setMinimum(2)
        self.npoints.setValue(10)
        self.addWidget(self.npoints, 0, 5)

    def set_values(self, min, max, npoints):
        self.min.setValue(min)
        self.max.setValue(max)
        self.npoints.setValue(npoints)

    def get_values(self):
        return {
            "min": self.min.value(),
            "max": self.max.value(),
            "npoints": self.npoints.value()
        }


class ScanController(LayoutWidget):
    def __init__(self, procdesc):
        LayoutWidget.__init__(self)

        self.stack = QtGui.QStackedWidget()
        self.addWidget(self.stack, 1, 0, colspan=4)

        gmin, gmax = procdesc["global_min"], procdesc["global_max"]
        gstep = procdesc["global_step"]
        unit = procdesc["unit"]

        self.v_noscan = QtGui.QDoubleSpinBox()
        if gmin is not None:
            self.v_noscan.setMinimum(gmin)
        if gmax is not None:
            self.v_noscan.setMaximum(gmax)
        if gstep is not None:
            self.v_noscan.setSingleStep(gstep)
        if unit:
            self.v_noscan.setSuffix(" " + unit)
        self.v_noscan_gr = LayoutWidget()
        self.v_noscan_gr.addWidget(QtGui.QLabel("Value:"), 0, 0)
        self.v_noscan_gr.addWidget(self.v_noscan, 0, 1)
        self.stack.addWidget(self.v_noscan_gr)

        self.v_linear = _Range(gmin, gmax, gstep, unit)
        self.stack.addWidget(self.v_linear)

        self.v_random = _Range(gmin, gmax, gstep, unit)
        self.stack.addWidget(self.v_random)

        self.v_explicit = QtGui.QLineEdit()
        self.v_explicit_gr = LayoutWidget()
        self.v_explicit_gr.addWidget(QtGui.QLabel("Sequence:"), 0, 0)
        self.v_explicit_gr.addWidget(self.v_explicit, 0, 1)
        self.stack.addWidget(self.v_explicit_gr)

        self.noscan = QtGui.QRadioButton("No scan")
        self.linear = QtGui.QRadioButton("Linear")
        self.random = QtGui.QRadioButton("Random")
        self.explicit = QtGui.QRadioButton("Explicit")
        radiobuttons = QtGui.QButtonGroup()
        for n, b in enumerate([self.noscan, self.linear,
                               self.random, self.explicit]):
            self.addWidget(b, 0, n)
            radiobuttons.addButton(b)
            b.toggled.connect(self.select_page)

        if "default" in procdesc:
            d = procdesc["default"]
            if d["ty"] == "NoScan":
                self.noscan.setChecked(True)
                self.v_noscan.setValue(d["value"])
            elif d["ty"] == "LinearScan":
                self.linear.setChecked(True)
                self.v_linear.set_values(d["min"], d["max"], d["step"])
            elif d["ty"] == "RandomScan":
                self.random.setChecked(True)
                self.v_random.set_values(d["min"], d["max"], d["step"])
            elif d["ty"] == "ExplicitScan":
                self.explicit.setChecked(True)
                self.v_explicit.insert(" ".join(
                    [str(x) for x in d["sequence"]]))
        else:
            self.noscan.setChecked(True)

    def select_page(self):
        if self.noscan.isChecked():
            self.stack.setCurrentWidget(self.v_noscan_gr)
        elif self.linear.isChecked():
            self.stack.setCurrentWidget(self.v_linear)
        elif self.random.isChecked():
            self.stack.setCurrentWidget(self.v_random)
        elif self.explicit.isChecked():
            self.stack.setCurrentWidget(self.v_explicit_gr)

    def get_argument_value(self):
        if self.noscan.isChecked():
            return {"ty": "NoScan", "value": self.v_noscan.value()}
        elif self.linear.isChecked():
            d = {"ty": "LinearScan"}
            d.update(self.v_linear.get_values())
            return d
        elif self.random.isChecked():
            d = {"ty": "RandomScan"}
            d.update(self.v_random.get_values())
            return d
        elif self.explicit.isChecked():
            sequence = [float(x) for x in self.v_explicit.text().split()]
            return {"ty": "ExplicitScan", "sequence": sequence}
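ScanController.get_argument_value returns one of four small dictionaries; a hedged sketch of those shapes plus an illustrative helper (not part of this commit) that expands a linear scan into points:

noscan = {"ty": "NoScan", "value": 100e-6}
linear = {"ty": "LinearScan", "min": 0.0, "max": 1.0, "npoints": 5}
explicit = {"ty": "ExplicitScan", "sequence": [1.0, 2.0, 5.0]}

def linear_points(d):
    # Hypothetical expansion helper, for illustration only.
    step = (d["max"] - d["min"])/(d["npoints"] - 1)
    return [d["min"] + i*step for i in range(d["npoints"])]

print(linear_points(linear))   # -> [0.0, 0.25, 0.5, 0.75, 1.0]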
@@ -6,19 +6,18 @@ from pyqtgraph import dockarea

 from artiq.protocols.sync_struct import Subscriber
 from artiq.gui.tools import DictSyncModel
-from artiq.tools import format_arguments


 class _ScheduleModel(DictSyncModel):
     def __init__(self, parent, init):
         DictSyncModel.__init__(self,
             ["RID", "Pipeline", "Status", "Prio", "Due date",
-             "File", "Experiment", "Arguments"],
+             "File", "Class name"],
             parent, init)

     def sort_key(self, k, v):
-        # order by due date, and then by priority and RID
-        return (v["due_date"] or 0, -v["priority"], k)
+        # order by priority, and then by due date and RID
+        return (-v["priority"], v["due_date"] or 0, k)

     def convert(self, k, v, column):
         if column == 0:
@@ -38,12 +37,10 @@ class _ScheduleModel(DictSyncModel):
         elif column == 5:
             return v["expid"]["file"]
         elif column == 6:
-            if v["expid"]["experiment"] is None:
+            if v["expid"]["class_name"] is None:
                 return ""
             else:
-                return v["expid"]["experiment"]
-        elif column == 7:
-            return format_arguments(v["expid"]["arguments"])
+                return v["expid"]["class_name"]
         else:
             raise ValueError

@@ -56,6 +53,9 @@ class ScheduleDock(dockarea.Dock):

         self.table = QtGui.QTableView()
         self.table.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
+        self.table.setSelectionMode(QtGui.QAbstractItemView.SingleSelection)
+        self.table.horizontalHeader().setResizeMode(
+            QtGui.QHeaderView.ResizeToContents)
         self.addWidget(self.table)

         self.table.setContextMenuPolicy(QtCore.Qt.ActionsContextMenu)
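A small illustration of the new ordering in _ScheduleModel.sort_key: highest priority first, then earliest due date, then RID (the sample entries are made up):

entries = {
    1: {"priority": 0, "due_date": None},
    2: {"priority": 10, "due_date": None},
    3: {"priority": 10, "due_date": 1000.0},
}
order = sorted(entries, key=lambda k: (-entries[k]["priority"],
                                       entries[k]["due_date"] or 0, k))
print(order)   # -> [2, 3, 1]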
@@ -1,29 +1,56 @@
 from quamash import QtCore


-class _DictSyncSubstruct:
+def short_format(v):
+    t = type(v)
+    if t is int or t is float:
+        return str(v)
+    else:
+        r = t.__name__
+        if t is list or t is dict or t is set:
+            r += " ({})".format(len(v))
+        return r
+
+
+class _SyncSubstruct:
     def __init__(self, update_cb, ref):
         self.update_cb = update_cb
         self.ref = ref

-    def __getitem__(self, key):
-        return _DictSyncSubstruct(self.update_cb, self.ref[key])
+    def append(self, x):
+        self.ref.append(x)
+        self.update_cb()
+
+    def insert(self, i, x):
+        self.ref.insert(i, x)
+        self.update_cb()
+
+    def pop(self, i=-1):
+        self.ref.pop(i)
+        self.update_cb()

     def __setitem__(self, key, value):
         self.ref[key] = value
         self.update_cb()

+    def __delitem__(self, key):
+        self.ref.__delitem__(key)
+        self.update_cb()
+
+    def __getitem__(self, key):
+        return _SyncSubstruct(self.update_cb, self.ref[key])
+

 class DictSyncModel(QtCore.QAbstractTableModel):
     def __init__(self, headers, parent, init):
         self.headers = headers
-        self.data = init
-        self.row_to_key = sorted(self.data.keys(),
-                                 key=lambda k: self.sort_key(k, self.data[k]))
+        self.backing_store = init
+        self.row_to_key = sorted(self.backing_store.keys(),
+                                 key=lambda k: self.sort_key(k, self.backing_store[k]))
         QtCore.QAbstractTableModel.__init__(self, parent)

     def rowCount(self, parent):
-        return len(self.data)
+        return len(self.backing_store)

     def columnCount(self, parent):
         return len(self.headers)
@@ -34,7 +61,7 @@ class DictSyncModel(QtCore.QAbstractTableModel):
         elif role != QtCore.Qt.DisplayRole:
             return None
         k = self.row_to_key[index.row()]
-        return self.convert(k, self.data[k], index.column())
+        return self.convert(k, self.backing_store[k], index.column())

     def headerData(self, col, orientation, role):
         if (orientation == QtCore.Qt.Horizontal
@@ -48,7 +75,7 @@ class DictSyncModel(QtCore.QAbstractTableModel):
         while lo < hi:
             mid = (lo + hi)//2
             if (self.sort_key(self.row_to_key[mid],
-                              self.data[self.row_to_key[mid]])
+                              self.backing_store[self.row_to_key[mid]])
                     < self.sort_key(k, v)):
                 lo = mid + 1
             else:
@@ -56,7 +83,7 @@ class DictSyncModel(QtCore.QAbstractTableModel):
         return lo

     def __setitem__(self, k, v):
-        if k in self.data:
+        if k in self.backing_store:
             old_row = self.row_to_key.index(k)
             new_row = self._find_row(k, v)
             if old_row == new_row:
@@ -65,7 +92,7 @@ class DictSyncModel(QtCore.QAbstractTableModel):
             else:
                 self.beginMoveRows(QtCore.QModelIndex(), old_row, old_row,
                                    QtCore.QModelIndex(), new_row)
-                self.data[k] = v
+                self.backing_store[k] = v
                 self.row_to_key[old_row], self.row_to_key[new_row] = \
                     self.row_to_key[new_row], self.row_to_key[old_row]
                 if old_row != new_row:
@@ -73,7 +100,7 @@ class DictSyncModel(QtCore.QAbstractTableModel):
         else:
             row = self._find_row(k, v)
             self.beginInsertRows(QtCore.QModelIndex(), row, row)
-            self.data[k] = v
+            self.backing_store[k] = v
             self.row_to_key.insert(row, k)
             self.endInsertRows()
@@ -81,16 +108,66 @@ class DictSyncModel(QtCore.QAbstractTableModel):
         row = self.row_to_key.index(k)
         self.beginRemoveRows(QtCore.QModelIndex(), row, row)
         del self.row_to_key[row]
-        del self.data[k]
+        del self.backing_store[k]
         self.endRemoveRows()

-    def __getitem__(self, key):
+    def __getitem__(self, k):
         def update():
-            self[key] = self.data[key]
-        return _DictSyncSubstruct(update, self.data[key])
+            self[k] = self.backing_store[k]
+        return _SyncSubstruct(update, self.backing_store[k])

     def sort_key(self, k, v):
         raise NotImplementedError

     def convert(self, k, v, column):
         raise NotImplementedError


+class ListSyncModel(QtCore.QAbstractTableModel):
+    def __init__(self, headers, parent, init):
+        self.headers = headers
+        self.backing_store = init
+        QtCore.QAbstractTableModel.__init__(self, parent)
+
+    def rowCount(self, parent):
+        return len(self.backing_store)
+
+    def columnCount(self, parent):
+        return len(self.headers)
+
+    def data(self, index, role):
+        if not index.isValid():
+            return None
+        elif role != QtCore.Qt.DisplayRole:
+            return None
+        return self.convert(self.backing_store[index.row()], index.column())
+
+    def headerData(self, col, orientation, role):
+        if (orientation == QtCore.Qt.Horizontal
+                and role == QtCore.Qt.DisplayRole):
+            return self.headers[col]
+        return None
+
+    def __setitem__(self, k, v):
+        self.dataChanged.emit(self.index(k, 0),
+                              self.index(k, len(self.headers)))
+        self.backing_store[k] = v
+
+    def __delitem__(self, k):
+        self.beginRemoveRows(QtCore.QModelIndex(), k, k)
+        del self.backing_store[k]
+        self.endRemoveRows()
+
+    def __getitem__(self, k):
+        def update():
+            self[k] = self.backing_store[k]
+        return _SyncSubstruct(update, self.backing_store[k])
+
+    def append(self, v):
+        row = len(self.backing_store)
+        self.beginInsertRows(QtCore.QModelIndex(), row, row)
+        self.backing_store.append(v)
+        self.endInsertRows()
+
+    def convert(self, v, column):
+        raise NotImplementedError
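A quick usage sketch of the new short_format helper; the function body is restated from the hunk above so the snippet runs standalone, and the sample values are arbitrary:

def short_format(v):
    t = type(v)
    if t is int or t is float:
        return str(v)
    else:
        r = t.__name__
        if t is list or t is dict or t is set:
            r += " ({})".format(len(v))
        return r

print(short_format(3.14))        # -> 3.14
print(short_format([1, 2, 3]))   # -> list (3)
print(short_format({"a": 1}))    # -> dict (1)
print(short_format("text"))      # -> str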
@@ -0,0 +1,12 @@ (new file)
from artiq.language import core, environment, units, scan
from artiq.language.core import *
from artiq.language.environment import *
from artiq.language.units import *
from artiq.language.scan import *


__all__ = []
__all__.extend(core.__all__)
__all__.extend(environment.__all__)
__all__.extend(units.__all__)
__all__.extend(scan.__all__)
@@ -2,8 +2,20 @@
 Core ARTIQ extensions to the Python language.
 """

-from collections import namedtuple as _namedtuple
-from functools import wraps as _wraps
+from collections import namedtuple
+from functools import wraps
+
+
+__all__ = ["int64", "round64", "kernel", "portable",
+           "set_time_manager", "set_syscall_manager", "set_watchdog_factory",
+           "RuntimeException", "EncodedException"]
+
+# global namespace for kernels
+kernel_globals = ("sequential", "parallel",
+                  "delay_mu", "now_mu", "at_mu", "delay",
+                  "seconds_to_mu", "mu_to_seconds",
+                  "syscall", "watchdog")
+__all__.extend(kernel_globals)


 class int64(int):
@@ -28,7 +40,6 @@ class int64(int):
     True
     >>> a + b
     6
-
     """
     pass
@@ -62,12 +73,11 @@ def round64(x):
     This function is equivalent to ``int64(round(x))`` but, when targeting
     static compilation, prevents overflow when the rounded value is too large
     to fit in a 32-bit integer.
-
     """
     return int64(round(x))


-_KernelFunctionInfo = _namedtuple("_KernelFunctionInfo", "core_name k_function")
+_KernelFunctionInfo = namedtuple("_KernelFunctionInfo", "core_name k_function")


 def kernel(arg):
@@ -88,11 +98,10 @@ def kernel(arg):

     The decorator takes an optional parameter that defaults to ``core`` and
     specifies the name of the attribute to use as core device driver.
-
     """
     if isinstance(arg, str):
         def real_decorator(k_function):
-            @_wraps(k_function)
+            @wraps(k_function)
             def run_on_core(exp, *k_args, **k_kwargs):
                 return getattr(exp, arg).run(k_function,
                                              ((exp,) + k_args), k_kwargs)
@@ -101,7 +110,7 @@ def kernel(arg):
             return run_on_core
         return real_decorator
     else:
-        @_wraps(arg)
+        @wraps(arg)
         def run_on_core(exp, *k_args, **k_kwargs):
             return exp.core.run(arg, ((exp,) + k_args), k_kwargs)
         run_on_core.k_function_info = _KernelFunctionInfo(
@@ -117,7 +126,6 @@ def portable(f):
     host will be executed on the host (no compilation and execution on the
     core device). A decorated function called from a kernel will be executed
     on the core device (no RPC).
-
     """
     f.k_function_info = _KernelFunctionInfo(core_name="", k_function=f)
     return f
@@ -131,9 +139,10 @@ class _DummyTimeManager:
     enter_sequential = _not_implemented
     enter_parallel = _not_implemented
     exit = _not_implemented
+    take_time_mu = _not_implemented
+    get_time_mu = _not_implemented
+    set_time_mu = _not_implemented
     take_time = _not_implemented
-    get_time = _not_implemented
-    set_time = _not_implemented


 _time_manager = _DummyTimeManager()
@@ -143,7 +152,6 @@ def set_time_manager(time_manager):
     directly inside the Python interpreter. The time manager responds to the
     entering and leaving of parallel/sequential blocks, delays, etc. and
     provides a time-stamped logging facility for events.
-
     """
     global _time_manager
     _time_manager = time_manager
@@ -160,17 +168,10 @@ _syscall_manager = _DummySyscallManager()
 def set_syscall_manager(syscall_manager):
     """Set the system call manager used for simulating the core device's
     runtime in the Python interpreter.
-
     """
     global _syscall_manager
     _syscall_manager = syscall_manager


-# global namespace for kernels
-kernel_globals = ("sequential", "parallel",
-                  "delay", "now", "at", "time_to_cycles", "cycles_to_time",
-                  "syscall", "watchdog")
-

 class _Sequential:
     """In a sequential block, statements are executed one after another, with
@@ -190,7 +191,6 @@ class _Parallel:
     The execution time of a parallel block is the execution time of its longest
     statement. A parallel block may contain sequential blocks, which themselves
     may contain parallel blocks, etc.
-
     """
     def __enter__(self):
         _time_manager.enter_parallel()
@@ -200,50 +200,49 @@ class _Parallel:
 parallel = _Parallel()


-def delay(duration):
-    """Increases the RTIO time by the given amount.
-
-    """
+def delay_mu(duration):
+    """Increases the RTIO time by the given amount (in machine units)."""
+    _time_manager.take_time_mu(duration)
+
+
+def now_mu():
+    """Retrieves the current RTIO time, in machine units."""
+    return _time_manager.get_time_mu()
+
+
+def at_mu(time):
+    """Sets the RTIO time to the specified absolute value, in machine units."""
+    _time_manager.set_time_mu(time)
+
+
+def delay(duration):
+    """Increases the RTIO time by the given amount (in seconds)."""
     _time_manager.take_time(duration)


-def now():
-    """Retrieves the current RTIO time, in seconds.
-
-    """
-    return _time_manager.get_time()
-
-
-def at(time):
-    """Sets the RTIO time to the specified absolute value.
-
-    """
-    _time_manager.set_time(time)
-
-
-def time_to_cycles(time, core=None):
-    """Converts time to the corresponding number of RTIO cycles.
-
-    :param time: Time (in seconds) to convert.
-    :param core: Core device for which to perform the conversion. Specify only
-        when running in the interpreter (not in kernel).
-
-    """
-    if core is None:
-        raise ValueError("Core device must be specified for time conversion")
-    return round64(time.amount//core.ref_period)
-
-
-def cycles_to_time(cycles, core=None):
-    """Converts RTIO cycles to the corresponding time.
-
-    :param time: Cycle count to convert.
-    :param core: Core device for which to perform the conversion. Specify only
+def seconds_to_mu(seconds, core=None):
+    """Converts seconds to the corresponding number of machine units
+    (RTIO cycles).
+
+    :param seconds: time (in seconds) to convert.
+    :param core: core device for which to perform the conversion. Specify only
         when running in the interpreter (not in kernel).
     """
     if core is None:
         raise ValueError("Core device must be specified for time conversion")
-    return cycles*core.ref_period
+    return round64(seconds//core.ref_period)
+
+
+def mu_to_seconds(mu, core=None):
+    """Converts machine units (RTIO cycles) to seconds.
+
+    :param mu: cycle count to convert.
+    :param core: core device for which to perform the conversion. Specify only
+        when running in the interpreter (not in kernel).
+    """
+    if core is None:
+        raise ValueError("Core device must be specified for time conversion")
+    return mu*core.ref_period


 def syscall(*args):
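A hedged, pure-Python illustration of the new machine-unit conversions. The stand-in core object and its reference period are assumptions; a power-of-two period is used only so the floating-point division below is exact:

class FakeCore:
    ref_period = 2**-30   # assumed RTIO period (~0.93 ns)

core = FakeCore()
seconds = 2**-20          # ~0.95 us
mu = int(seconds // core.ref_period)      # what seconds_to_mu computes (sans int64)
print(mu)                                  # -> 1024 machine units
print(mu * core.ref_period == seconds)     # mu_to_seconds round-trips: True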
@@ -1,130 +0,0 @@ (file deleted; removed content follows)
"""
Connection to device, parameter and result database.
"""


class _AttributeKind:
    pass


class Device(_AttributeKind):
    """Represents a device for ``AutoDB`` to process."""
    pass


class NoDefault:
    """Represents the absence of a default value for ``Parameter``
    and ``Argument``.
    """
    pass


class Parameter(_AttributeKind):
    """Represents a parameter (from the database) for ``AutoDB``
    to process.

    :param default: Default value of the parameter to be used if not found
        in the database.
    """
    def __init__(self, default=NoDefault):
        self.default = default


class Argument(_AttributeKind):
    """Represents an argument (specifiable at instance creation) for
    ``AutoDB`` to process.

    :param default: Default value of the argument to be used if not specified
        at instance creation.
    """
    def __init__(self, default=NoDefault):
        self.default = default


class Result(_AttributeKind):
    """Represents a result for ``AutoDB`` to process."""
    pass


class AutoDB:
    """Base class to automate device, parameter and result database access.

    Drivers and experiments should in most cases overload this class to
    obtain the parameters and devices (including the core device) that they
    need, report results, and modify parameters.

    :param dbh: database hub to use. If ``None``, all devices and parameters
        must be supplied as keyword arguments, and reporting results and
        modifying parameters is not supported.
    """
    class DBKeys:
        pass

    realtime_results = dict()

    def __init__(self, dbh=None, **kwargs):
        self.dbh = dbh

        for k, v in kwargs.items():
            object.__setattr__(self, k, v)

        for k in dir(self.DBKeys):
            if k not in self.__dict__:
                ak = getattr(self.DBKeys, k)
                if isinstance(ak, Argument):
                    if ak.default is NoDefault:
                        raise AttributeError(
                            "No value specified for argument '{}'".format(k))
                    object.__setattr__(self, k, ak.default)
                elif isinstance(ak, Device):
                    try:
                        dev = self.dbh.get_device(k)
                    except KeyError:
                        raise KeyError("Device '{}' not found".format(k))
                    object.__setattr__(self, k, dev)
        self.build()
        if self.dbh is not None and self.realtime_results:
            self.dbh.add_rt_results(self.realtime_results)

    def __getattr__(self, name):
        ak = getattr(self.DBKeys, name)
        if isinstance(ak, Parameter):
            try:
                if self.dbh is None:
                    raise KeyError
                return self.dbh.get_parameter(name)
            except KeyError:
                if ak.default is not NoDefault:
                    return ak.default
                else:
                    raise AttributeError("Parameter '{}' not in database"
                                         " and without default value"
                                         .format(name))
        elif isinstance(ak, Result):
            try:
                return self.dbh.get_result(name)
            except KeyError:
                raise AttributeError("Result '{}' not found".format(name))
|
|
||||||
else:
|
|
||||||
raise ValueError
|
|
||||||
|
|
||||||
def __setattr__(self, name, value):
|
|
||||||
try:
|
|
||||||
ak = getattr(self.DBKeys, name)
|
|
||||||
except AttributeError:
|
|
||||||
object.__setattr__(self, name, value)
|
|
||||||
else:
|
|
||||||
if isinstance(ak, Parameter):
|
|
||||||
self.dbh.set_parameter(name, value)
|
|
||||||
elif isinstance(ak, Result):
|
|
||||||
self.dbh.set_result(name, value)
|
|
||||||
else:
|
|
||||||
raise ValueError
|
|
||||||
|
|
||||||
def build(self):
|
|
||||||
"""This is called by ``__init__`` after the parameter initialization
|
|
||||||
is done.
|
|
||||||
|
|
||||||
The user may overload this method to complete the object's
|
|
||||||
initialization with all parameters available.
|
|
||||||
"""
|
|
||||||
pass
|
|
|
@@ -0,0 +1,298 @@
|
||||||
|
from collections import OrderedDict
|
||||||
|
from inspect import isclass
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ["NoDefault",
|
||||||
|
"FreeValue", "BooleanValue", "EnumerationValue",
|
||||||
|
"NumberValue", "StringValue",
|
||||||
|
"HasEnvironment",
|
||||||
|
"Experiment", "EnvExperiment", "is_experiment"]
|
||||||
|
|
||||||
|
|
||||||
|
class NoDefault:
|
||||||
|
"""Represents the absence of a default value."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class DefaultMissing(Exception):
|
||||||
|
"""Raised by the ``default`` method of argument processors when no default
|
||||||
|
value is available."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class _SimpleArgProcessor:
|
||||||
|
def __init__(self, default=NoDefault):
|
||||||
|
if default is not NoDefault:
|
||||||
|
self.default_value = default
|
||||||
|
|
||||||
|
def default(self):
|
||||||
|
if not hasattr(self, "default_value"):
|
||||||
|
raise DefaultMissing
|
||||||
|
return self.default_value
|
||||||
|
|
||||||
|
def process(self, x):
|
||||||
|
return x
|
||||||
|
|
||||||
|
def describe(self):
|
||||||
|
d = {"ty": self.__class__.__name__}
|
||||||
|
if hasattr(self, "default_value"):
|
||||||
|
d["default"] = self.default_value
|
||||||
|
return d
|
||||||
|
|
||||||
|
|
||||||
|
class FreeValue(_SimpleArgProcessor):
|
||||||
|
"""An argument that can be an arbitrary Python value."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class BooleanValue(_SimpleArgProcessor):
|
||||||
|
"""A boolean argument."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class EnumerationValue(_SimpleArgProcessor):
|
||||||
|
"""An argument that can take a string value among a predefined set of
|
||||||
|
values.
|
||||||
|
|
||||||
|
:param choices: A list of strings representing the possible values of the
|
||||||
|
argument.
|
||||||
|
"""
|
||||||
|
def __init__(self, choices, default=NoDefault):
|
||||||
|
_SimpleArgProcessor.__init__(self, default)
|
||||||
|
assert default is NoDefault or default in choices
|
||||||
|
self.choices = choices
|
||||||
|
|
||||||
|
def describe(self):
|
||||||
|
d = _SimpleArgProcessor.describe(self)
|
||||||
|
d["choices"] = self.choices
|
||||||
|
return d
|
||||||
|
|
||||||
|
|
||||||
|
class NumberValue(_SimpleArgProcessor):
|
||||||
|
"""An argument that can take a numerical value (typically floating point).
|
||||||
|
|
||||||
|
:param unit: A string representing the unit of the value, for user
|
||||||
|
interface (UI) purposes.
|
||||||
|
:param step: The step with which the value should be modified by up/down
|
||||||
|
buttons in a UI.
|
||||||
|
:param min: The minimum value of the argument.
|
||||||
|
:param max: The maximum value of the argument.
|
||||||
|
"""
|
||||||
|
def __init__(self, default=NoDefault, unit="", step=None,
|
||||||
|
min=None, max=None):
|
||||||
|
_SimpleArgProcessor.__init__(self, default)
|
||||||
|
self.unit = unit
|
||||||
|
self.step = step
|
||||||
|
self.min = min
|
||||||
|
self.max = max
|
||||||
|
|
||||||
|
def describe(self):
|
||||||
|
d = _SimpleArgProcessor.describe(self)
|
||||||
|
d["unit"] = self.unit
|
||||||
|
d["step"] = self.step
|
||||||
|
d["min"] = self.min
|
||||||
|
d["max"] = self.max
|
||||||
|
return d
|
||||||
|
|
||||||
|
|
||||||
|
class StringValue(_SimpleArgProcessor):
|
||||||
|
"""A string argument."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
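
The argument processors above expose themselves to GUIs and to the master through describe(), which returns a plain dictionary. A quick interpreter sketch of the dictionaries the implementations above produce (the example values are arbitrary, the keys follow directly from the describe() methods shown):

from artiq.language.environment import NumberValue, EnumerationValue

NumberValue(default=100, unit="us", step=1, min=0, max=1000).describe()
# {'ty': 'NumberValue', 'default': 100, 'unit': 'us', 'step': 1, 'min': 0, 'max': 1000}

EnumerationValue(["up", "down"], default="up").describe()
# {'ty': 'EnumerationValue', 'default': 'up', 'choices': ['up', 'down']}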
||||||
|
class HasEnvironment:
|
||||||
|
"""Provides methods to manage the environment of an experiment (devices,
|
||||||
|
parameters, results, arguments)."""
|
||||||
|
def __init__(self, dmgr=None, pdb=None, rdb=None, *,
|
||||||
|
param_override=dict(), default_arg_none=False, **kwargs):
|
||||||
|
self.requested_args = OrderedDict()
|
||||||
|
|
||||||
|
self.__dmgr = dmgr
|
||||||
|
self.__pdb = pdb
|
||||||
|
self.__rdb = rdb
|
||||||
|
self.__param_override = param_override
|
||||||
|
self.__default_arg_none = default_arg_none
|
||||||
|
|
||||||
|
self.__kwargs = kwargs
|
||||||
|
self.__in_build = True
|
||||||
|
self.build()
|
||||||
|
self.__in_build = False
|
||||||
|
for key in self.__kwargs.keys():
|
||||||
|
if key not in self.requested_args:
|
||||||
|
raise TypeError("Got unexpected argument: " + key)
|
||||||
|
del self.__kwargs
|
||||||
|
|
||||||
|
def build(self):
|
||||||
|
"""Must be implemented by the user to request arguments.
|
||||||
|
|
||||||
|
Other initialization steps such as requesting devices and parameters
|
||||||
|
or initializing real-time results may also be performed here.
|
||||||
|
|
||||||
|
When the repository is scanned, any requested devices and parameters
|
||||||
|
are set to ``None``."""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def dbs(self):
|
||||||
|
return self.__dmgr, self.__pdb, self.__rdb
|
||||||
|
|
||||||
|
def get_argument(self, key, processor=None):
|
||||||
|
"""Retrieves and returns the value of an argument.
|
||||||
|
|
||||||
|
:param key: Name of the argument.
|
||||||
|
:param processor: A description of how to process the argument, such
|
||||||
|
as instances of ``BooleanValue`` and ``NumberValue``.
|
||||||
|
"""
|
||||||
|
if not self.__in_build:
|
||||||
|
raise TypeError("get_argument() should only "
|
||||||
|
"be called from build()")
|
||||||
|
if processor is None:
|
||||||
|
processor = FreeValue()
|
||||||
|
self.requested_args[key] = processor
|
||||||
|
try:
|
||||||
|
argval = self.__kwargs[key]
|
||||||
|
except KeyError:
|
||||||
|
try:
|
||||||
|
return processor.default()
|
||||||
|
except DefaultMissing:
|
||||||
|
if self.__default_arg_none:
|
||||||
|
return None
|
||||||
|
else:
|
||||||
|
raise
|
||||||
|
return processor.process(argval)
|
||||||
|
|
||||||
|
def attr_argument(self, key, processor=None):
|
||||||
|
"""Sets an argument as attribute. The names of the argument and of the
|
||||||
|
attribute are the same."""
|
||||||
|
setattr(self, key, self.get_argument(key, processor))
|
||||||
|
|
||||||
|
def get_device(self, key):
|
||||||
|
"""Creates and returns a device driver."""
|
||||||
|
if self.__dmgr is None:
|
||||||
|
raise ValueError("Device manager not present")
|
||||||
|
return self.__dmgr.get(key)
|
||||||
|
|
||||||
|
def attr_device(self, key):
|
||||||
|
"""Sets a device driver as attribute. The names of the device driver
|
||||||
|
and of the attribute are the same."""
|
||||||
|
setattr(self, key, self.get_device(key))
|
||||||
|
|
||||||
|
def get_parameter(self, key, default=NoDefault):
|
||||||
|
"""Retrieves and returns a parameter."""
|
||||||
|
if self.__pdb is None:
|
||||||
|
raise ValueError("Parameter database not present")
|
||||||
|
if key in self.__param_override:
|
||||||
|
return self.__param_override[key]
|
||||||
|
try:
|
||||||
|
return self.__pdb.get(key)
|
||||||
|
except KeyError:
|
||||||
|
if default is not NoDefault:
|
||||||
|
return default
|
||||||
|
else:
|
||||||
|
raise
|
||||||
|
|
||||||
|
def attr_parameter(self, key, default=NoDefault):
|
||||||
|
"""Sets a parameter as attribute. The names of the argument and of the
|
||||||
|
attribute are the same."""
|
||||||
|
setattr(self, key, self.get_parameter(key, default))
|
||||||
|
|
||||||
|
def set_parameter(self, key, value):
|
||||||
|
"""Writes the value of a parameter into the parameter database."""
|
||||||
|
if self.__pdb is None:
|
||||||
|
raise ValueError("Parameter database not present")
|
||||||
|
self.__pdb.set(key, value)
|
||||||
|
|
||||||
|
def set_result(self, key, value, realtime=False):
|
||||||
|
"""Writes the value of a result.
|
||||||
|
|
||||||
|
:param realtime: Marks the result as real-time, making it immediately
|
||||||
|
available to clients such as the user interface. Returns a
|
||||||
|
``Notifier`` instance that can be used to modify mutable results
|
||||||
|
(such as lists) and synchronize the modifications with the clients.
|
||||||
|
"""
|
||||||
|
if self.__rdb is None:
|
||||||
|
raise ValueError("Result database not present")
|
||||||
|
if realtime:
|
||||||
|
if key in self.__rdb.nrt:
|
||||||
|
raise ValueError("Result is already non-realtime")
|
||||||
|
self.__rdb.rt[key] = value
|
||||||
|
notifier = self.__rdb.rt[key]
|
||||||
|
notifier.kernel_attr_init = False
|
||||||
|
return notifier
|
||||||
|
else:
|
||||||
|
if key in self.__rdb.rt.read:
|
||||||
|
raise ValueError("Result is already realtime")
|
||||||
|
self.__rdb.nrt[key] = value
|
||||||
|
|
||||||
|
def attr_rtresult(self, key, init_value):
|
||||||
|
"""Writes the value of a real-time result and sets the corresponding
|
||||||
|
``Notifier`` as attribute. The names of the result and of the
|
||||||
|
attribute are the same."""
|
||||||
|
setattr(self, key, self.set_result(key, init_value, True))
|
||||||
|
|
||||||
|
def get_result(self, key):
|
||||||
|
"""Retrieves the value of a result.
|
||||||
|
|
||||||
|
There is no difference between real-time and non-real-time results
|
||||||
|
(this function does not return ``Notifier`` instances).
|
||||||
|
"""
|
||||||
|
if self.__rdb is None:
|
||||||
|
raise ValueError("Result database not present")
|
||||||
|
return self.__rdb.get(key)
|
||||||
|
|
||||||
|
|
||||||
|
class Experiment:
|
||||||
|
"""Base class for experiments.
|
||||||
|
|
||||||
|
Deriving from this class enables automatic experiment discovery in
|
||||||
|
Python modules.
|
||||||
|
"""
|
||||||
|
def prepare(self):
|
||||||
|
"""Entry point for pre-computing data necessary for running the
|
||||||
|
experiment.
|
||||||
|
|
||||||
|
Doing such computations outside of ``run`` enables more efficient
|
||||||
|
scheduling of multiple experiments that need to access the shared
|
||||||
|
hardware during part of their execution.
|
||||||
|
|
||||||
|
This method must not interact with the hardware.
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
def run(self):
|
||||||
|
"""The main entry point of the experiment.
|
||||||
|
|
||||||
|
This method must be overloaded by the user to implement the main
|
||||||
|
control flow of the experiment.
|
||||||
|
|
||||||
|
This method may interact with the hardware.
|
||||||
|
|
||||||
|
The experiment may call the scheduler's ``pause`` method while in
|
||||||
|
``run``.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def analyze(self):
|
||||||
|
"""Entry point for analyzing the results of the experiment.
|
||||||
|
|
||||||
|
This method may be overloaded by the user to implement the analysis
|
||||||
|
phase of the experiment, for example fitting curves.
|
||||||
|
|
||||||
|
Splitting this phase from ``run`` enables tweaking the analysis
|
||||||
|
algorithm on pre-existing data, and CPU-bound analyses to be run
|
||||||
|
overlapped with the next experiment in a pipelined manner.
|
||||||
|
|
||||||
|
This method must not interact with the hardware.
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class EnvExperiment(Experiment, HasEnvironment):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def is_experiment(o):
|
||||||
|
"""Checks if a Python object is an instantiable user experiment."""
|
||||||
|
return (isclass(o)
|
||||||
|
and issubclass(o, Experiment)
|
||||||
|
and o is not Experiment
|
||||||
|
and o is not EnvExperiment)
|
|
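
Putting the pieces together: an experiment now derives from EnvExperiment, requests everything it needs in build(), and reports results through set_result(). A hedged sketch, assuming it is run under a master that supplies the device, parameter and result databases; the device name "core", the argument names and the list-style use of the returned Notifier are illustrative, not taken from this diff:

from artiq.language.environment import EnvExperiment, NumberValue, EnumerationValue

class FlopSketch(EnvExperiment):
    """Flop sketch"""
    def build(self):
        self.attr_device("core")                               # becomes self.core
        self.attr_argument("npoints", NumberValue(100))
        self.attr_argument("mode", EnumerationValue(["cw", "pulsed"], "cw"))

    def run(self):
        curve = self.set_result("curve", [], realtime=True)    # Notifier, pushed live to clients
        for i in range(self.npoints):
            curve.append(float(i))                             # placeholder for real data

    def analyze(self):
        self.set_result("npoints_done", self.npoints)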
@@ -1,40 +0,0 @@
|
||||||
from inspect import isclass
|
|
||||||
|
|
||||||
|
|
||||||
class Experiment:
|
|
||||||
"""Base class for experiments.
|
|
||||||
|
|
||||||
Deriving from this class enables automatic experiment discovery in
|
|
||||||
Python modules.
|
|
||||||
"""
|
|
||||||
def run(self):
|
|
||||||
"""The main entry point of the experiment.
|
|
||||||
|
|
||||||
This method must be overloaded by the user to implement the main
|
|
||||||
control flow of the experiment.
|
|
||||||
"""
|
|
||||||
raise NotImplementedError
|
|
||||||
|
|
||||||
def analyze(self):
|
|
||||||
"""Entry point for analyzing the results of the experiment.
|
|
||||||
|
|
||||||
This method may be overloaded by the user to implement the analysis
|
|
||||||
phase of the experiment, for example fitting curves.
|
|
||||||
|
|
||||||
Splitting this phase from ``run`` enables tweaking the analysis
|
|
||||||
algorithm on pre-existing data, and CPU-bound analyses to be run
|
|
||||||
overlapped with the next experiment in a pipelined manner.
|
|
||||||
|
|
||||||
This method must not interact with the hardware.
|
|
||||||
"""
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def has_analyze(experiment):
|
|
||||||
"""Checks if an experiment instance overloaded its ``analyze`` method."""
|
|
||||||
return experiment.analyze.__func__ is not Experiment.analyze
|
|
||||||
|
|
||||||
|
|
||||||
def is_experiment(o):
|
|
||||||
"""Checks if a Python object is an instantiable experiment."""
|
|
||||||
return isclass(o) and issubclass(o, Experiment) and o is not Experiment
|
|
|
@@ -0,0 +1,114 @@
|
||||||
|
from random import Random, shuffle
|
||||||
|
import inspect
|
||||||
|
|
||||||
|
from artiq.language.core import *
|
||||||
|
from artiq.language.environment import NoDefault, DefaultMissing
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ["NoScan", "LinearScan", "RandomScan", "ExplicitScan", "Scannable"]
|
||||||
|
|
||||||
|
|
||||||
|
class NoScan:
|
||||||
|
def __init__(self, value):
|
||||||
|
self.value = value
|
||||||
|
|
||||||
|
@portable
|
||||||
|
def _gen(self):
|
||||||
|
yield self.value
|
||||||
|
|
||||||
|
@portable
|
||||||
|
def __iter__(self):
|
||||||
|
return self._gen()
|
||||||
|
|
||||||
|
def describe(self):
|
||||||
|
return {"ty": "NoScan", "value": self.value}
|
||||||
|
|
||||||
|
|
||||||
|
class LinearScan:
|
||||||
|
def __init__(self, min, max, npoints):
|
||||||
|
self.min = min
|
||||||
|
self.max = max
|
||||||
|
self.npoints = npoints
|
||||||
|
|
||||||
|
@portable
|
||||||
|
def _gen(self):
|
||||||
|
r = self.max - self.min
|
||||||
|
d = self.npoints - 1
|
||||||
|
for i in range(self.npoints):
|
||||||
|
yield r*i/d + self.min
|
||||||
|
|
||||||
|
@portable
|
||||||
|
def __iter__(self):
|
||||||
|
return self._gen()
|
||||||
|
|
||||||
|
def describe(self):
|
||||||
|
return {"ty": "LinearScan",
|
||||||
|
"min": self.min, "max": self.max, "npoints": self.npoints}
|
||||||
|
|
||||||
|
|
||||||
|
class RandomScan:
|
||||||
|
def __init__(self, min, max, npoints, seed=0):
|
||||||
|
self.sequence = list(LinearScan(min, max, npoints))
|
||||||
|
shuffle(self.sequence, Random(seed).random)
|
||||||
|
|
||||||
|
@portable
|
||||||
|
def __iter__(self):
|
||||||
|
return iter(self.sequence)
|
||||||
|
|
||||||
|
def describe(self):
|
||||||
|
return {"ty": "RandomScan",
|
||||||
|
"min": self.min, "max": self.max, "npoints": self.npoints}
|
||||||
|
|
||||||
|
|
||||||
|
class ExplicitScan:
|
||||||
|
def __init__(self, sequence):
|
||||||
|
self.sequence = sequence
|
||||||
|
|
||||||
|
@portable
|
||||||
|
def __iter__(self):
|
||||||
|
return iter(self.sequence)
|
||||||
|
|
||||||
|
def describe(self):
|
||||||
|
return {"ty": "ExplicitScan", "sequence": self.sequence}
|
||||||
|
|
||||||
|
|
||||||
|
_ty_to_scan = {
|
||||||
|
"NoScan": NoScan,
|
||||||
|
"LinearScan": LinearScan,
|
||||||
|
"RandomScan": RandomScan,
|
||||||
|
"ExplicitScan": ExplicitScan
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class Scannable:
|
||||||
|
def __init__(self, global_min=None, global_max=None, global_step=None,
|
||||||
|
unit="", default=NoDefault):
|
||||||
|
self.global_min = global_min
|
||||||
|
self.global_max = global_max
|
||||||
|
self.global_step = global_step
|
||||||
|
self.unit = unit
|
||||||
|
if default is not NoDefault:
|
||||||
|
self.default_value = default
|
||||||
|
|
||||||
|
def default(self):
|
||||||
|
if not hasattr(self, "default_value"):
|
||||||
|
raise DefaultMissing
|
||||||
|
return self.default_value
|
||||||
|
|
||||||
|
def process(self, x):
|
||||||
|
cls = _ty_to_scan[x["ty"]]
|
||||||
|
args = dict()
|
||||||
|
for arg in inspect.getargspec(cls).args[1:]:
|
||||||
|
if arg in x:
|
||||||
|
args[arg] = x[arg]
|
||||||
|
return cls(**args)
|
||||||
|
|
||||||
|
def describe(self):
|
||||||
|
d = {"ty": "Scannable"}
|
||||||
|
d["global_min"] = self.global_min
|
||||||
|
d["global_max"] = self.global_max
|
||||||
|
d["global_step"] = self.global_step
|
||||||
|
d["unit"] = self.unit
|
||||||
|
if hasattr(self, "default_value"):
|
||||||
|
d["default"] = self.default_value.describe()
|
||||||
|
return d
|
|
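
These scan objects are ordinary iterables, and Scannable.process() rebuilds one from the same dictionary shape that describe() emits, so a scan selected in a client round-trips cleanly. A short host-side sketch, assuming the module lands at artiq.language.scan (the values are chosen for illustration):

from artiq.language.scan import LinearScan, RandomScan, NoScan, Scannable

print(list(LinearScan(0.0, 1.0, 5)))
# [0.0, 0.25, 0.5, 0.75, 1.0]

scannable = Scannable(default=NoScan(0.5))
scan = scannable.process({"ty": "RandomScan", "min": 0.0, "max": 1.0, "npoints": 5})
print(sorted(scan))
# the same five points, visited in a seeded pseudo-random order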
@@ -1,266 +1,20 @@
|
||||||
"""
|
__all__ = []
|
||||||
Definition and management of physical units.
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
from fractions import Fraction as _Fraction
|
|
||||||
|
|
||||||
|
|
||||||
class DimensionError(Exception):
|
|
||||||
"""Raised when attempting an operation with incompatible units.
|
|
||||||
|
|
||||||
When targeting the core device, all units are statically managed at
|
|
||||||
compilation time. Thus, when raised by functions in this module, this
|
|
||||||
exception cannot be caught in the kernel as it is raised by the compiler
|
|
||||||
instead.
|
|
||||||
|
|
||||||
"""
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def mul_dimension(l, r):
|
|
||||||
"""Returns the unit obtained by multiplying unit ``l`` with unit ``r``.
|
|
||||||
|
|
||||||
Raises ``DimensionError`` if the resulting unit is not implemented.
|
|
||||||
|
|
||||||
"""
|
|
||||||
if l is None:
|
|
||||||
return r
|
|
||||||
if r is None:
|
|
||||||
return l
|
|
||||||
if {l, r} == {"Hz", "s"}:
|
|
||||||
return None
|
|
||||||
raise DimensionError
|
|
||||||
|
|
||||||
|
|
||||||
def _rmul_dimension(l, r):
|
|
||||||
return mul_dimension(r, l)
|
|
||||||
|
|
||||||
|
|
||||||
def div_dimension(l, r):
|
|
||||||
"""Returns the unit obtained by dividing unit ``l`` with unit ``r``.
|
|
||||||
|
|
||||||
Raises ``DimensionError`` if the resulting unit is not implemented.
|
|
||||||
|
|
||||||
"""
|
|
||||||
if l == r:
|
|
||||||
return None
|
|
||||||
if r is None:
|
|
||||||
return l
|
|
||||||
if l is None:
|
|
||||||
if r == "s":
|
|
||||||
return "Hz"
|
|
||||||
if r == "Hz":
|
|
||||||
return "s"
|
|
||||||
raise DimensionError
|
|
||||||
|
|
||||||
|
|
||||||
def _rdiv_dimension(l, r):
|
|
||||||
return div_dimension(r, l)
|
|
||||||
|
|
||||||
|
|
||||||
def addsub_dimension(x, y):
|
|
||||||
"""Returns the unit obtained by adding or subtracting unit ``l`` with
|
|
||||||
unit ``y``.
|
|
||||||
|
|
||||||
Raises ``DimensionError`` if ``x`` and ``y`` are different.
|
|
||||||
|
|
||||||
"""
|
|
||||||
if x == y:
|
|
||||||
return x
|
|
||||||
else:
|
|
||||||
raise DimensionError
|
|
||||||
|
|
||||||
|
|
||||||
_prefixes_str = "pnum_kMG"
|
_prefixes_str = "pnum_kMG"
|
||||||
_smallest_prefix = _Fraction(1, 10**12)
|
_smallest_prefix_exp = -12
|
||||||
|
|
||||||
|
|
||||||
def _format(amount, unit):
|
|
||||||
if amount is NotImplemented:
|
|
||||||
return NotImplemented
|
|
||||||
if unit is None:
|
|
||||||
return amount
|
|
||||||
else:
|
|
||||||
return Quantity(amount, unit)
|
|
||||||
|
|
||||||
|
|
||||||
class Quantity:
|
|
||||||
"""Represents an amount in a given fundamental unit (identified by a
|
|
||||||
string).
|
|
||||||
|
|
||||||
The amount can be of any Python numerical type (integer, float,
|
|
||||||
Fraction, ...).
|
|
||||||
Arithmetic operations and comparisons are directly delegated to the
|
|
||||||
underlying numerical types.
|
|
||||||
|
|
||||||
"""
|
|
||||||
def __init__(self, amount, unit):
|
|
||||||
self.amount = amount
|
|
||||||
self.unit = unit
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
r_amount = self.amount
|
|
||||||
if isinstance(r_amount, int) or isinstance(r_amount, _Fraction):
|
|
||||||
r_prefix = 0
|
|
||||||
r_amount = r_amount/_smallest_prefix
|
|
||||||
if r_amount:
|
|
||||||
numerator = r_amount.numerator
|
|
||||||
while numerator % 1000 == 0 and r_prefix < len(_prefixes_str):
|
|
||||||
numerator /= 1000
|
|
||||||
r_amount /= 1000
|
|
||||||
r_prefix += 1
|
|
||||||
prefix_str = _prefixes_str[r_prefix]
|
|
||||||
if prefix_str == "_":
|
|
||||||
prefix_str = ""
|
|
||||||
return str(r_amount) + " " + prefix_str + self.unit
|
|
||||||
else:
|
|
||||||
return str(r_amount) + " " + self.unit
|
|
||||||
|
|
||||||
def __float__(self):
|
|
||||||
return float(self.amount)
|
|
||||||
|
|
||||||
# mul/div
|
|
||||||
def _binop(self, other, opf_name, dim_function):
|
|
||||||
opf = getattr(self.amount, opf_name)
|
|
||||||
if isinstance(other, Quantity):
|
|
||||||
amount = opf(other.amount)
|
|
||||||
unit = dim_function(self.unit, other.unit)
|
|
||||||
else:
|
|
||||||
amount = opf(other)
|
|
||||||
unit = dim_function(self.unit, None)
|
|
||||||
return _format(amount, unit)
|
|
||||||
|
|
||||||
def __mul__(self, other):
|
|
||||||
return self._binop(other, "__mul__", mul_dimension)
|
|
||||||
|
|
||||||
def __rmul__(self, other):
|
|
||||||
return self._binop(other, "__rmul__", _rmul_dimension)
|
|
||||||
|
|
||||||
def __truediv__(self, other):
|
|
||||||
return self._binop(other, "__truediv__", div_dimension)
|
|
||||||
|
|
||||||
def __rtruediv__(self, other):
|
|
||||||
return self._binop(other, "__rtruediv__", _rdiv_dimension)
|
|
||||||
|
|
||||||
def __floordiv__(self, other):
|
|
||||||
return self._binop(other, "__floordiv__", div_dimension)
|
|
||||||
|
|
||||||
def __rfloordiv__(self, other):
|
|
||||||
return self._binop(other, "__rfloordiv__", _rdiv_dimension)
|
|
||||||
|
|
||||||
# unary ops
|
|
||||||
def __neg__(self):
|
|
||||||
return Quantity(self.amount.__neg__(), self.unit)
|
|
||||||
|
|
||||||
def __pos__(self):
|
|
||||||
return Quantity(self.amount.__pos__(), self.unit)
|
|
||||||
|
|
||||||
def __abs__(self):
|
|
||||||
return Quantity(abs(self.amount), self.unit)
|
|
||||||
|
|
||||||
# add/sub
|
|
||||||
def __add__(self, other):
|
|
||||||
return self._binop(other, "__add__", addsub_dimension)
|
|
||||||
|
|
||||||
def __radd__(self, other):
|
|
||||||
return self._binop(other, "__radd__", addsub_dimension)
|
|
||||||
|
|
||||||
def __sub__(self, other):
|
|
||||||
return self._binop(other, "__sub__", addsub_dimension)
|
|
||||||
|
|
||||||
def __rsub__(self, other):
|
|
||||||
return self._binop(other, "__rsub__", addsub_dimension)
|
|
||||||
|
|
||||||
def __mod__(self, other):
|
|
||||||
return self._binop(other, "__mod__", addsub_dimension)
|
|
||||||
|
|
||||||
def __rmod__(self, other):
|
|
||||||
return self._binop(other, "__rmod__", addsub_dimension)
|
|
||||||
|
|
||||||
# comparisons
|
|
||||||
def _cmp(self, other, opf_name):
|
|
||||||
if not isinstance(other, Quantity) or other.unit != self.unit:
|
|
||||||
raise DimensionError
|
|
||||||
return getattr(self.amount, opf_name)(other.amount)
|
|
||||||
|
|
||||||
def __lt__(self, other):
|
|
||||||
return self._cmp(other, "__lt__")
|
|
||||||
|
|
||||||
def __le__(self, other):
|
|
||||||
return self._cmp(other, "__le__")
|
|
||||||
|
|
||||||
def __eq__(self, other):
|
|
||||||
return self._cmp(other, "__eq__")
|
|
||||||
|
|
||||||
def __ne__(self, other):
|
|
||||||
return self._cmp(other, "__ne__")
|
|
||||||
|
|
||||||
def __gt__(self, other):
|
|
||||||
return self._cmp(other, "__gt__")
|
|
||||||
|
|
||||||
def __ge__(self, other):
|
|
||||||
return self._cmp(other, "__ge__")
|
|
||||||
|
|
||||||
|
|
||||||
def _register_unit(unit, prefixes):
|
def _register_unit(unit, prefixes):
|
||||||
amount = _smallest_prefix
|
exponent = _smallest_prefix_exp
|
||||||
for prefix in _prefixes_str:
|
for prefix in _prefixes_str:
|
||||||
if prefix in prefixes:
|
if prefix in prefixes:
|
||||||
quantity = Quantity(amount, unit)
|
|
||||||
full_name = prefix + unit if prefix != "_" else unit
|
full_name = prefix + unit if prefix != "_" else unit
|
||||||
globals()[full_name] = quantity
|
globals()[full_name] = 10.**exponent
|
||||||
amount *= 1000
|
__all__.append(full_name)
|
||||||
|
exponent += 3
|
||||||
|
|
||||||
|
|
||||||
_register_unit("s", "pnum_")
|
_register_unit("s", "pnum_")
|
||||||
_register_unit("Hz", "_kMG")
|
_register_unit("Hz", "_kMG")
|
||||||
_register_unit("dB", "_")
|
_register_unit("dB", "_")
|
||||||
_register_unit("V", "um_k")
|
_register_unit("V", "um_k")
|
||||||
|
|
||||||
|
|
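
With this rewrite the unit constants become plain floats (powers of ten of the base unit) and the Quantity wrapper below disappears, so arithmetic on times and frequencies is ordinary float arithmetic with no run-time dimension checking. What the registration calls above define, and a tiny usage sketch:

# s:  ps = 1e-12, ns = 1e-9, us = 1e-6, ms = 1e-3, s = 1.0
# Hz: Hz = 1.0, kHz = 1e3, MHz = 1e6, GHz = 1e9   (dB and V are registered analogously)
from artiq.language.units import us, MHz

t = 250*us            # 0.00025 -- just a float now
f = 1/(2*t)           # 2000.0; dividing by seconds no longer yields an "Hz" Quantity
print(f/MHz)          # 0.002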
||||||
def check_unit(value, unit):
|
|
||||||
"""Checks that the value has the specified unit. Unit specification is
|
|
||||||
a string representing the unit without any prefix (e.g. ``s``, ``Hz``).
|
|
||||||
Checking for a dimensionless value (not a ``Quantity`` instance) is done
|
|
||||||
by setting ``unit`` to ``None``.
|
|
||||||
|
|
||||||
If the units do not match, ``DimensionError`` is raised.
|
|
||||||
|
|
||||||
This function can be used in kernels and is executed at compilation time.
|
|
||||||
|
|
||||||
There is already unit checking built into the arithmetic, so you typically
|
|
||||||
need to use this function only when using the ``amount`` property of
|
|
||||||
``Quantity``.
|
|
||||||
|
|
||||||
"""
|
|
||||||
if unit is None:
|
|
||||||
if isinstance(value, Quantity):
|
|
||||||
raise DimensionError
|
|
||||||
else:
|
|
||||||
if not isinstance(value, Quantity) or value.unit != unit:
|
|
||||||
raise DimensionError
|
|
||||||
|
|
||||||
def strip_unit(value, unit):
|
|
||||||
"""Check that the value has the specified unit and returns its amount.
|
|
||||||
Raises ``DimensionError`` if the units do not match.
|
|
||||||
|
|
||||||
If the passed value is not a ``Quantity``, it is assumed to be in the
|
|
||||||
specified unit and is returned unchanged.
|
|
||||||
|
|
||||||
If ``unit`` is ``None``, passing a ``Quantity`` as value raises
|
|
||||||
``DimensionError``.
|
|
||||||
|
|
||||||
"""
|
|
||||||
if unit is None:
|
|
||||||
if isinstance(value, Quantity):
|
|
||||||
raise DimensionError
|
|
||||||
else:
|
|
||||||
return value
|
|
||||||
else:
|
|
||||||
if isinstance(value, Quantity):
|
|
||||||
if value.unit != unit:
|
|
||||||
raise DimensionError
|
|
||||||
else:
|
|
||||||
return value.amount
|
|
||||||
else:
|
|
||||||
return value
|
|
||||||
|
|
|
@@ -1,39 +1,70 @@
|
||||||
import os
|
import os
|
||||||
|
import logging
|
||||||
|
import asyncio
|
||||||
|
|
||||||
from artiq.protocols.sync_struct import Notifier
|
from artiq.protocols.sync_struct import Notifier
|
||||||
from artiq.tools import file_import
|
from artiq.master.worker import Worker
|
||||||
from artiq.language.experiment import is_experiment
|
|
||||||
|
|
||||||
|
|
||||||
def scan_experiments():
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
@asyncio.coroutine
|
||||||
|
def _scan_experiments(log):
|
||||||
r = dict()
|
r = dict()
|
||||||
for f in os.listdir("repository"):
|
for f in os.listdir("repository"):
|
||||||
if f.endswith(".py"):
|
if f.endswith(".py"):
|
||||||
try:
|
try:
|
||||||
m = file_import(os.path.join("repository", f))
|
full_name = os.path.join("repository", f)
|
||||||
except:
|
worker = Worker({"log": lambda message: log("scan", message)})
|
||||||
continue
|
try:
|
||||||
for k, v in m.__dict__.items():
|
description = yield from worker.examine(full_name)
|
||||||
if is_experiment(v):
|
finally:
|
||||||
if v.__doc__ is None:
|
yield from worker.close()
|
||||||
name = k
|
for class_name, class_desc in description.items():
|
||||||
else:
|
name = class_desc["name"]
|
||||||
name = v.__doc__.splitlines()[0].strip()
|
arguments = class_desc["arguments"]
|
||||||
if name[-1] == ".":
|
if name in r:
|
||||||
name = name[:-1]
|
logger.warning("Duplicate experiment name: '%s'", name)
|
||||||
|
basename = name
|
||||||
|
i = 1
|
||||||
|
while name in r:
|
||||||
|
name = basename + str(i)
|
||||||
|
i += 1
|
||||||
entry = {
|
entry = {
|
||||||
"file": os.path.join("repository", f),
|
"file": full_name,
|
||||||
"experiment": k,
|
"class_name": class_name,
|
||||||
"gui_file": getattr(v, "__artiq_gui_file__", None)
|
"arguments": arguments
|
||||||
}
|
}
|
||||||
r[name] = entry
|
r[name] = entry
|
||||||
|
except:
|
||||||
|
logger.warning("Skipping file '%s'", f, exc_info=True)
|
||||||
return r
|
return r
|
||||||
|
|
||||||
|
|
||||||
class Repository:
|
def _sync_explist(target, source):
|
||||||
def __init__(self):
|
for k in list(target.read.keys()):
|
||||||
self.explist = Notifier(scan_experiments())
|
if k not in source:
|
||||||
|
del target[k]
|
||||||
|
for k in source.keys():
|
||||||
|
if k not in target.read or target.read[k] != source[k]:
|
||||||
|
target[k] = source[k]
|
||||||
|
|
||||||
def get_data(self, filename):
|
|
||||||
with open(os.path.join("repository", filename)) as f:
|
class Repository:
|
||||||
return f.read()
|
def __init__(self, log_fn):
|
||||||
|
self.explist = Notifier(dict())
|
||||||
|
self._scanning = False
|
||||||
|
self.log_fn = log_fn
|
||||||
|
|
||||||
|
@asyncio.coroutine
|
||||||
|
def scan(self):
|
||||||
|
if self._scanning:
|
||||||
|
return
|
||||||
|
self._scanning = True
|
||||||
|
new_explist = yield from _scan_experiments(self.log_fn)
|
||||||
|
_sync_explist(self.explist, new_explist)
|
||||||
|
self._scanning = False
|
||||||
|
|
||||||
|
def scan_async(self):
|
||||||
|
asyncio.async(self.scan())
|
||||||
|
|
|
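
Discovery therefore no longer imports repository files into the master process: each file is handed to a short-lived Worker, whose examine() call returns the experiment classes and their argument descriptions, and _sync_explist mirrors the result into the explist Notifier so connected clients receive incremental updates. A hedged sketch of driving a scan by hand; the log callback stands in for the master's real log handler, and a ./repository directory with experiment files is assumed to exist:

import asyncio

def log_fn(source, message):
    print(source, message)

loop = asyncio.get_event_loop()
repo = Repository(log_fn)
loop.run_until_complete(repo.scan())      # spawns worker processes, fills repo.explist
print(list(repo.explist.read.keys()))     # names of the discovered experiments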
@@ -1,103 +0,0 @@
|
||||||
import os
|
|
||||||
import time
|
|
||||||
import re
|
|
||||||
|
|
||||||
import numpy
|
|
||||||
import h5py
|
|
||||||
|
|
||||||
from artiq.protocols.sync_struct import Notifier, process_mod
|
|
||||||
|
|
||||||
|
|
||||||
def get_hdf5_output(start_time, rid, name):
|
|
||||||
dirname = os.path.join("results",
|
|
||||||
time.strftime("%Y-%m-%d", start_time),
|
|
||||||
time.strftime("%H-%M", start_time))
|
|
||||||
filename = "{:09}-{}.h5".format(rid, name)
|
|
||||||
os.makedirs(dirname, exist_ok=True)
|
|
||||||
return h5py.File(os.path.join(dirname, filename), "w")
|
|
||||||
|
|
||||||
|
|
||||||
def get_last_rid():
|
|
||||||
r = -1
|
|
||||||
try:
|
|
||||||
day_folders = os.listdir("results")
|
|
||||||
except:
|
|
||||||
return r
|
|
||||||
day_folders = filter(lambda x: re.fullmatch('\d\d\d\d-\d\d-\d\d', x),
|
|
||||||
day_folders)
|
|
||||||
for df in day_folders:
|
|
||||||
day_path = os.path.join("results", df)
|
|
||||||
try:
|
|
||||||
minute_folders = os.listdir(day_path)
|
|
||||||
except:
|
|
||||||
continue
|
|
||||||
minute_folders = filter(lambda x: re.fullmatch('\d\d-\d\d', x),
|
|
||||||
minute_folders)
|
|
||||||
for mf in minute_folders:
|
|
||||||
minute_path = os.path.join(day_path, mf)
|
|
||||||
try:
|
|
||||||
h5files = os.listdir(minute_path)
|
|
||||||
except:
|
|
||||||
continue
|
|
||||||
for x in h5files:
|
|
||||||
m = re.fullmatch('(\d\d\d\d\d\d\d\d\d)-.*\.h5', x)
|
|
||||||
rid = int(m.group(1))
|
|
||||||
if rid > r:
|
|
||||||
r = rid
|
|
||||||
return r
|
|
||||||
|
|
||||||
|
|
||||||
_type_to_hdf5 = {
|
|
||||||
int: h5py.h5t.STD_I64BE,
|
|
||||||
float: h5py.h5t.IEEE_F64BE
|
|
||||||
}
|
|
||||||
|
|
||||||
def result_dict_to_hdf5(f, rd):
|
|
||||||
for name, data in rd.items():
|
|
||||||
if isinstance(data, list):
|
|
||||||
el_ty = type(data[0])
|
|
||||||
for d in data:
|
|
||||||
if type(d) != el_ty:
|
|
||||||
raise TypeError("All list elements must have the same"
|
|
||||||
" type for HDF5 output")
|
|
||||||
try:
|
|
||||||
el_ty_h5 = _type_to_hdf5[el_ty]
|
|
||||||
except KeyError:
|
|
||||||
raise TypeError("List element type {} is not supported for"
|
|
||||||
" HDF5 output".format(el_ty))
|
|
||||||
dataset = f.create_dataset(name, (len(data), ), el_ty_h5)
|
|
||||||
dataset[:] = data
|
|
||||||
elif isinstance(data, numpy.ndarray):
|
|
||||||
f.create_dataset(name, data=data)
|
|
||||||
else:
|
|
||||||
ty = type(data)
|
|
||||||
try:
|
|
||||||
ty_h5 = _type_to_hdf5[ty]
|
|
||||||
except KeyError:
|
|
||||||
raise TypeError("Type {} is not supported for HDF5 output"
|
|
||||||
.format(ty))
|
|
||||||
dataset = f.create_dataset(name, (), ty_h5)
|
|
||||||
dataset[()] = data
|
|
||||||
|
|
||||||
|
|
||||||
class RTResults:
|
|
||||||
def __init__(self):
|
|
||||||
self.groups = Notifier(dict())
|
|
||||||
self.current_group = "default"
|
|
||||||
|
|
||||||
def init(self, description):
|
|
||||||
data = dict()
|
|
||||||
for rtr in description.keys():
|
|
||||||
if isinstance(rtr, tuple):
|
|
||||||
for e in rtr:
|
|
||||||
data[e] = []
|
|
||||||
else:
|
|
||||||
data[rtr] = []
|
|
||||||
self.groups[self.current_group] = {
|
|
||||||
"description": description,
|
|
||||||
"data": data
|
|
||||||
}
|
|
||||||
|
|
||||||
def update(self, mod):
|
|
||||||
target = self.groups[self.current_group]["data"]
|
|
||||||
process_mod(target, mod)
|
|
|
@@ -4,7 +4,8 @@ from enum import Enum
|
||||||
from time import time
|
from time import time
|
||||||
|
|
||||||
from artiq.master.worker import Worker
|
from artiq.master.worker import Worker
|
||||||
from artiq.tools import asyncio_wait_or_cancel, asyncio_queue_peek
|
from artiq.tools import (asyncio_wait_or_cancel, asyncio_queue_peek,
|
||||||
|
TaskObject, WaitSet)
|
||||||
from artiq.protocols.sync_struct import Notifier
|
from artiq.protocols.sync_struct import Notifier
|
||||||
|
|
||||||
|
|
||||||
|
@@ -13,27 +14,28 @@ logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
class RunStatus(Enum):
|
class RunStatus(Enum):
|
||||||
pending = 0
|
pending = 0
|
||||||
preparing = 1
|
flushing = 1
|
||||||
prepare_done = 2
|
preparing = 2
|
||||||
running = 3
|
prepare_done = 3
|
||||||
run_done = 4
|
running = 4
|
||||||
analyzing = 5
|
run_done = 5
|
||||||
analyze_done = 6
|
analyzing = 6
|
||||||
paused = 7
|
analyze_done = 7
|
||||||
|
paused = 8
|
||||||
|
|
||||||
|
|
||||||
def _mk_worker_method(name):
|
def _mk_worker_method(name):
|
||||||
@asyncio.coroutine
|
@asyncio.coroutine
|
||||||
def worker_method(self, *args, **kwargs):
|
def worker_method(self, *args, **kwargs):
|
||||||
if self._terminated:
|
if self.worker.closed.is_set():
|
||||||
return True
|
return True
|
||||||
m = getattr(self._worker, name)
|
m = getattr(self.worker, name)
|
||||||
try:
|
try:
|
||||||
return (yield from m(*args, **kwargs))
|
return (yield from m(*args, **kwargs))
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
if isinstance(e, asyncio.CancelledError):
|
if isinstance(e, asyncio.CancelledError):
|
||||||
raise
|
raise
|
||||||
if self._terminated:
|
if self.worker.closed.is_set():
|
||||||
logger.debug("suppressing worker exception of terminated run",
|
logger.debug("suppressing worker exception of terminated run",
|
||||||
exc_info=True)
|
exc_info=True)
|
||||||
# Return completion on termination
|
# Return completion on termination
|
||||||
|
@@ -45,7 +47,7 @@ def _mk_worker_method(name):
|
||||||
|
|
||||||
class Run:
|
class Run:
|
||||||
def __init__(self, rid, pipeline_name,
|
def __init__(self, rid, pipeline_name,
|
||||||
expid, priority, due_date,
|
expid, priority, due_date, flush,
|
||||||
worker_handlers, notifier):
|
worker_handlers, notifier):
|
||||||
# called through pool
|
# called through pool
|
||||||
self.rid = rid
|
self.rid = rid
|
||||||
|
@@ -53,10 +55,11 @@ class Run:
|
||||||
self.expid = expid
|
self.expid = expid
|
||||||
self.priority = priority
|
self.priority = priority
|
||||||
self.due_date = due_date
|
self.due_date = due_date
|
||||||
|
self.flush = flush
|
||||||
|
|
||||||
|
self.worker = Worker(worker_handlers)
|
||||||
|
|
||||||
self._status = RunStatus.pending
|
self._status = RunStatus.pending
|
||||||
self._terminated = False
|
|
||||||
self._worker = Worker(worker_handlers)
|
|
||||||
|
|
||||||
self._notifier = notifier
|
self._notifier = notifier
|
||||||
self._notifier[self.rid] = {
|
self._notifier[self.rid] = {
|
||||||
|
@@ -64,6 +67,7 @@ class Run:
|
||||||
"expid": self.expid,
|
"expid": self.expid,
|
||||||
"priority": self.priority,
|
"priority": self.priority,
|
||||||
"due_date": self.due_date,
|
"due_date": self.due_date,
|
||||||
|
"flush": self.flush,
|
||||||
"status": self._status.name
|
"status": self._status.name
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -74,33 +78,35 @@ class Run:
|
||||||
@status.setter
|
@status.setter
|
||||||
def status(self, value):
|
def status(self, value):
|
||||||
self._status = value
|
self._status = value
|
||||||
if not self._terminated:
|
if not self.worker.closed.is_set():
|
||||||
self._notifier[self.rid]["status"] = self._status.name
|
self._notifier[self.rid]["status"] = self._status.name
|
||||||
|
|
||||||
# The run with the largest priority_key is to be scheduled first
|
# The run with the largest priority_key is to be scheduled first
|
||||||
def priority_key(self, now):
|
def priority_key(self, now=None):
|
||||||
if self.due_date is None:
|
if self.due_date is None:
|
||||||
overdue = 0
|
|
||||||
due_date_k = 0
|
due_date_k = 0
|
||||||
else:
|
else:
|
||||||
overdue = int(now > self.due_date)
|
|
||||||
due_date_k = -self.due_date
|
due_date_k = -self.due_date
|
||||||
return (overdue, self.priority, due_date_k, -self.rid)
|
if now is not None and self.due_date is not None:
|
||||||
|
runnable = int(now > self.due_date)
|
||||||
|
else:
|
||||||
|
runnable = 1
|
||||||
|
return (runnable, self.priority, due_date_k, -self.rid)
|
||||||
|
|
||||||
@asyncio.coroutine
|
@asyncio.coroutine
|
||||||
def close(self):
|
def close(self):
|
||||||
# called through pool
|
# called through pool
|
||||||
self._terminated = True
|
yield from self.worker.close()
|
||||||
yield from self._worker.close()
|
|
||||||
del self._notifier[self.rid]
|
del self._notifier[self.rid]
|
||||||
|
|
||||||
_prepare = _mk_worker_method("prepare")
|
_build = _mk_worker_method("build")
|
||||||
|
|
||||||
@asyncio.coroutine
|
@asyncio.coroutine
|
||||||
def prepare(self):
|
def build(self):
|
||||||
yield from self._prepare(self.rid, self.pipeline_name, self.expid,
|
yield from self._build(self.rid, self.pipeline_name, self.expid,
|
||||||
self.priority)
|
self.priority)
|
||||||
|
|
||||||
|
prepare = _mk_worker_method("prepare")
|
||||||
run = _mk_worker_method("run")
|
run = _mk_worker_method("run")
|
||||||
resume = _mk_worker_method("resume")
|
resume = _mk_worker_method("resume")
|
||||||
analyze = _mk_worker_method("analyze")
|
analyze = _mk_worker_method("analyze")
|
||||||
|
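
The reworked priority_key in the hunk above makes the scheduling order explicit: runs sort first by whether they are runnable at all (a future due date pushes a run behind everything else until it comes due), then by priority, then by earliest due date, then by lowest RID; calling it with no argument, as RunStage now does, treats every queued run as runnable. A small sketch of the resulting tuples and their ordering (the RIDs and times are made up):

# priority_key(now) -> (runnable, priority, -due_date or 0, -rid); the largest tuple runs first.
keys = {
    5: (1, 0, 0, -5),          # priority 0, no due date
    6: (0, 1, -2000.0, -6),    # priority 1, due at t=2000, not yet due at now=1000
    7: (1, 1, -500.0, -7),     # priority 1, already overdue at now=1000
}
print(sorted(keys, key=keys.get, reverse=True))   # [7, 5, 6]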
@@ -120,20 +126,20 @@ class RIDCounter:
|
||||||
class RunPool:
|
class RunPool:
|
||||||
def __init__(self, ridc, worker_handlers, notifier):
|
def __init__(self, ridc, worker_handlers, notifier):
|
||||||
self.runs = dict()
|
self.runs = dict()
|
||||||
self.submitted_callback = None
|
self.submitted_cb = None
|
||||||
|
|
||||||
self._ridc = ridc
|
self._ridc = ridc
|
||||||
self._worker_handlers = worker_handlers
|
self._worker_handlers = worker_handlers
|
||||||
self._notifier = notifier
|
self._notifier = notifier
|
||||||
|
|
||||||
def submit(self, expid, priority, due_date, pipeline_name):
|
def submit(self, expid, priority, due_date, flush, pipeline_name):
|
||||||
# called through scheduler
|
# called through scheduler
|
||||||
rid = self._ridc.get()
|
rid = self._ridc.get()
|
||||||
run = Run(rid, pipeline_name, expid, priority, due_date,
|
run = Run(rid, pipeline_name, expid, priority, due_date, flush,
|
||||||
self._worker_handlers, self._notifier)
|
self._worker_handlers, self._notifier)
|
||||||
self.runs[rid] = run
|
self.runs[rid] = run
|
||||||
if self.submitted_callback is not None:
|
if self.submitted_cb is not None:
|
||||||
self.submitted_callback()
|
self.submitted_cb()
|
||||||
return rid
|
return rid
|
||||||
|
|
||||||
@asyncio.coroutine
|
@asyncio.coroutine
|
||||||
|
@@ -145,29 +151,15 @@ class RunPool:
|
||||||
del self.runs[rid]
|
del self.runs[rid]
|
||||||
|
|
||||||
|
|
||||||
class TaskObject:
|
|
||||||
def start(self):
|
|
||||||
self.task = asyncio.async(self._do())
|
|
||||||
|
|
||||||
@asyncio.coroutine
|
|
||||||
def stop(self):
|
|
||||||
self.task.cancel()
|
|
||||||
yield from asyncio.wait([self.task])
|
|
||||||
del self.task
|
|
||||||
|
|
||||||
@asyncio.coroutine
|
|
||||||
def _do(self):
|
|
||||||
raise NotImplementedError
|
|
||||||
|
|
||||||
|
|
||||||
class PrepareStage(TaskObject):
|
class PrepareStage(TaskObject):
|
||||||
def __init__(self, deleter, pool, outq):
|
def __init__(self, flush_tracker, delete_cb, pool, outq):
|
||||||
self.deleter = deleter
|
self.flush_tracker = flush_tracker
|
||||||
|
self.delete_cb = delete_cb
|
||||||
self.pool = pool
|
self.pool = pool
|
||||||
self.outq = outq
|
self.outq = outq
|
||||||
|
|
||||||
self.pool_submitted = asyncio.Event()
|
self.pool_submitted = asyncio.Event()
|
||||||
self.pool.submitted_callback = lambda: self.pool_submitted.set()
|
self.pool.submitted_cb = lambda: self.pool_submitted.set()
|
||||||
|
|
||||||
@asyncio.coroutine
|
@asyncio.coroutine
|
||||||
def _push_runs(self):
|
def _push_runs(self):
|
||||||
|
@@ -186,14 +178,24 @@ class PrepareStage(TaskObject):
|
||||||
# pending_runs is an empty sequence
|
# pending_runs is an empty sequence
|
||||||
return None
|
return None
|
||||||
if run.due_date is None or run.due_date < now:
|
if run.due_date is None or run.due_date < now:
|
||||||
|
if run.flush:
|
||||||
|
run.status = RunStatus.flushing
|
||||||
|
yield from asyncio_wait_or_cancel(
|
||||||
|
[self.flush_tracker.wait_empty(),
|
||||||
|
run.worker.closed.wait()],
|
||||||
|
return_when=asyncio.FIRST_COMPLETED)
|
||||||
|
if run.worker.closed.is_set():
|
||||||
|
continue
|
||||||
run.status = RunStatus.preparing
|
run.status = RunStatus.preparing
|
||||||
|
self.flush_tracker.add(run.rid)
|
||||||
try:
|
try:
|
||||||
|
yield from run.build()
|
||||||
yield from run.prepare()
|
yield from run.prepare()
|
||||||
except:
|
except:
|
||||||
logger.warning("got worker exception in prepare stage, "
|
logger.warning("got worker exception in prepare stage, "
|
||||||
"deleting RID %d",
|
"deleting RID %d",
|
||||||
run.rid, exc_info=True)
|
run.rid, exc_info=True)
|
||||||
self.deleter.delete(run.rid)
|
self.delete_cb(run.rid)
|
||||||
run.status = RunStatus.prepare_done
|
run.status = RunStatus.prepare_done
|
||||||
yield from self.outq.put(run)
|
yield from self.outq.put(run)
|
||||||
else:
|
else:
|
||||||
|
@@ -214,8 +216,8 @@ class PrepareStage(TaskObject):
|
||||||
|
|
||||||
|
|
||||||
class RunStage(TaskObject):
|
class RunStage(TaskObject):
|
||||||
def __init__(self, deleter, inq, outq):
|
def __init__(self, delete_cb, inq, outq):
|
||||||
self.deleter = deleter
|
self.delete_cb = delete_cb
|
||||||
self.inq = inq
|
self.inq = inq
|
||||||
self.outq = outq
|
self.outq = outq
|
||||||
|
|
||||||
|
@@ -228,10 +230,9 @@ class RunStage(TaskObject):
|
||||||
next_irun = asyncio_queue_peek(self.inq)
|
next_irun = asyncio_queue_peek(self.inq)
|
||||||
except asyncio.QueueEmpty:
|
except asyncio.QueueEmpty:
|
||||||
next_irun = None
|
next_irun = None
|
||||||
now = time()
|
|
||||||
if not stack or (
|
if not stack or (
|
||||||
next_irun is not None and
|
next_irun is not None and
|
||||||
next_irun.priority_key(now) > stack[-1].priority_key(now)):
|
next_irun.priority_key() > stack[-1].priority_key()):
|
||||||
stack.append((yield from self.inq.get()))
|
stack.append((yield from self.inq.get()))
|
||||||
|
|
||||||
run = stack.pop()
|
run = stack.pop()
|
||||||
|
@@ -246,7 +247,7 @@ class RunStage(TaskObject):
|
||||||
logger.warning("got worker exception in run stage, "
|
logger.warning("got worker exception in run stage, "
|
||||||
"deleting RID %d",
|
"deleting RID %d",
|
||||||
run.rid, exc_info=True)
|
run.rid, exc_info=True)
|
||||||
self.deleter.delete(run.rid)
|
self.delete_cb(run.rid)
|
||||||
else:
|
else:
|
||||||
if completed:
|
if completed:
|
||||||
run.status = RunStatus.run_done
|
run.status = RunStatus.run_done
|
||||||
|
@@ -257,8 +258,8 @@ class RunStage(TaskObject):
|
||||||
|
|
||||||
|
|
||||||
class AnalyzeStage(TaskObject):
|
class AnalyzeStage(TaskObject):
|
||||||
def __init__(self, deleter, inq):
|
def __init__(self, delete_cb, inq):
|
||||||
self.deleter = deleter
|
self.delete_cb = delete_cb
|
||||||
self.inq = inq
|
self.inq = inq
|
||||||
|
|
||||||
@asyncio.coroutine
|
@asyncio.coroutine
|
||||||
|
@@ -273,17 +274,23 @@ class AnalyzeStage(TaskObject):
|
||||||
logger.warning("got worker exception in analyze stage, "
|
logger.warning("got worker exception in analyze stage, "
|
||||||
"deleting RID %d",
|
"deleting RID %d",
|
||||||
run.rid, exc_info=True)
|
run.rid, exc_info=True)
|
||||||
self.deleter.delete(run.rid)
|
self.delete_cb(run.rid)
|
||||||
run.status = RunStatus.analyze_done
|
run.status = RunStatus.analyze_done
|
||||||
self.deleter.delete(run.rid)
|
self.delete_cb(run.rid)
|
||||||
|
|
||||||
|
|
||||||
class Pipeline:
|
class Pipeline:
|
||||||
def __init__(self, ridc, deleter, worker_handlers, notifier):
|
def __init__(self, ridc, deleter, worker_handlers, notifier):
|
||||||
|
flush_tracker = WaitSet()
|
||||||
|
def delete_cb(rid):
|
||||||
|
deleter.delete(rid)
|
||||||
|
flush_tracker.discard(rid)
|
||||||
self.pool = RunPool(ridc, worker_handlers, notifier)
|
self.pool = RunPool(ridc, worker_handlers, notifier)
|
||||||
self._prepare = PrepareStage(deleter, self.pool, asyncio.Queue(maxsize=1))
|
self._prepare = PrepareStage(flush_tracker, delete_cb,
|
||||||
self._run = RunStage(deleter, self._prepare.outq, asyncio.Queue(maxsize=1))
|
self.pool, asyncio.Queue(maxsize=1))
|
||||||
self._analyze = AnalyzeStage(deleter, self._run.outq)
|
self._run = RunStage(delete_cb,
|
||||||
|
self._prepare.outq, asyncio.Queue(maxsize=1))
|
||||||
|
self._analyze = AnalyzeStage(delete_cb, self._run.outq)
|
||||||
|
|
||||||
def start(self):
|
def start(self):
|
||||||
self._prepare.start()
|
self._prepare.start()
|
||||||
|
@@ -366,7 +373,7 @@ class Scheduler:
|
||||||
if self._pipelines:
|
if self._pipelines:
|
||||||
logger.warning("some pipelines were not garbage-collected")
|
logger.warning("some pipelines were not garbage-collected")
|
||||||
|
|
||||||
def submit(self, pipeline_name, expid, priority, due_date):
|
def submit(self, pipeline_name, expid, priority, due_date, flush):
|
||||||
if self._terminated:
|
if self._terminated:
|
||||||
return
|
return
|
||||||
try:
|
try:
|
||||||
|
@@ -377,7 +384,7 @@ class Scheduler:
|
||||||
self._worker_handlers, self.notifier)
|
self._worker_handlers, self.notifier)
|
||||||
self._pipelines[pipeline_name] = pipeline
|
self._pipelines[pipeline_name] = pipeline
|
||||||
pipeline.start()
|
pipeline.start()
|
||||||
return pipeline.pool.submit(expid, priority, due_date, pipeline_name)
|
return pipeline.pool.submit(expid, priority, due_date, flush, pipeline_name)
|
||||||
|
|
||||||
def delete(self, rid):
|
def delete(self, rid):
|
||||||
self._deleter.delete(rid)
|
self._deleter.delete(rid)
|
||||||
|
|
|
@@ -4,10 +4,11 @@ import logging
|
||||||
import subprocess
|
import subprocess
|
||||||
import traceback
|
import traceback
|
||||||
import time
|
import time
|
||||||
|
from functools import partial
|
||||||
|
|
||||||
from artiq.protocols import pyon
|
from artiq.protocols import pyon
|
||||||
from artiq.language.units import strip_unit
|
from artiq.tools import (asyncio_process_wait_timeout, asyncio_process_wait,
|
||||||
from artiq.tools import asyncio_process_wait_timeout, asyncio_wait_or_cancel
|
asyncio_wait_or_cancel)
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
@@ -26,14 +27,9 @@ class WorkerError(Exception):
|
||||||
|
|
||||||
|
|
||||||
class Worker:
|
class Worker:
|
||||||
def __init__(self, handlers,
|
def __init__(self, handlers=dict(), send_timeout=0.5):
|
||||||
send_timeout=0.5, term_timeout=1.0,
|
|
||||||
prepare_timeout=15.0, results_timeout=15.0):
|
|
||||||
self.handlers = handlers
|
self.handlers = handlers
|
||||||
self.send_timeout = send_timeout
|
self.send_timeout = send_timeout
|
||||||
self.term_timeout = term_timeout
|
|
||||||
self.prepare_timeout = prepare_timeout
|
|
||||||
self.results_timeout = results_timeout
|
|
||||||
|
|
||||||
self.rid = None
|
self.rid = None
|
||||||
self.process = None
|
self.process = None
|
||||||
|
@@ -49,7 +45,7 @@ class Worker:
|
||||||
avail = set(range(n_user_watchdogs + 1)) \
|
avail = set(range(n_user_watchdogs + 1)) \
|
||||||
- set(self.watchdogs.keys())
|
- set(self.watchdogs.keys())
|
||||||
wid = next(iter(avail))
|
wid = next(iter(avail))
|
||||||
self.watchdogs[wid] = time.monotonic() + strip_unit(t, "s")
|
self.watchdogs[wid] = time.monotonic() + t
|
||||||
return wid
|
return wid
|
||||||
|
|
||||||
def delete_watchdog(self, wid):
|
def delete_watchdog(self, wid):
|
||||||
|
@@ -74,7 +70,12 @@ class Worker:
|
||||||
self.io_lock.release()
|
self.io_lock.release()
|
||||||
|
|
||||||
@asyncio.coroutine
|
@asyncio.coroutine
|
||||||
def close(self):
|
def close(self, term_timeout=1.0):
|
||||||
|
"""Interrupts any I/O with the worker process and terminates the
|
||||||
|
worker process.
|
||||||
|
|
||||||
|
This method should always be called by the user to clean up, even if
|
||||||
|
build() or examine() raises an exception."""
|
||||||
self.closed.set()
|
self.closed.set()
|
||||||
yield from self.io_lock.acquire()
|
yield from self.io_lock.acquire()
|
||||||
try:
|
try:
|
||||||
|
@@ -83,33 +84,35 @@ class Worker:
|
||||||
logger.debug("worker was not created (RID %s)", self.rid)
|
logger.debug("worker was not created (RID %s)", self.rid)
|
||||||
return
|
return
|
||||||
if self.process.returncode is not None:
|
if self.process.returncode is not None:
|
||||||
logger.debug("worker already terminated (RID %d)", self.rid)
|
logger.debug("worker already terminated (RID %s)", self.rid)
|
||||||
if self.process.returncode != 0:
|
if self.process.returncode != 0:
|
||||||
logger.warning("worker finished with status code %d"
|
logger.warning("worker finished with status code %d"
|
||||||
" (RID %d)", self.process.returncode,
|
" (RID %s)", self.process.returncode,
|
||||||
self.rid)
|
self.rid)
|
||||||
return
|
return
|
||||||
obj = {"action": "terminate"}
|
obj = {"action": "terminate"}
|
||||||
try:
|
try:
|
||||||
yield from self._send(obj, self.send_timeout, cancellable=False)
|
yield from self._send(obj, cancellable=False)
|
||||||
except:
|
except:
|
||||||
logger.warning("failed to send terminate command to worker"
|
logger.warning("failed to send terminate command to worker"
|
||||||
" (RID %d), killing", self.rid, exc_info=True)
|
" (RID %s), killing", self.rid, exc_info=True)
|
||||||
self.process.kill()
|
self.process.kill()
|
||||||
|
yield from asyncio_process_wait(self.process)
|
||||||
return
|
return
|
||||||
try:
|
try:
|
||||||
yield from asyncio_process_wait_timeout(self.process,
|
yield from asyncio_process_wait_timeout(self.process,
|
||||||
self.term_timeout)
|
term_timeout)
|
||||||
except asyncio.TimeoutError:
|
except asyncio.TimeoutError:
|
||||||
logger.warning("worker did not exit (RID %d), killing", self.rid)
|
logger.warning("worker did not exit (RID %s), killing", self.rid)
|
||||||
self.process.kill()
|
self.process.kill()
|
||||||
|
yield from asyncio_process_wait(self.process)
|
||||||
else:
|
else:
|
||||||
logger.debug("worker exited gracefully (RID %d)", self.rid)
|
logger.debug("worker exited gracefully (RID %s)", self.rid)
|
||||||
finally:
|
finally:
|
||||||
self.io_lock.release()
|
self.io_lock.release()
|
||||||
|
|
||||||
@asyncio.coroutine
|
@asyncio.coroutine
|
||||||
def _send(self, obj, timeout, cancellable=True):
|
def _send(self, obj, cancellable=True):
|
||||||
assert self.io_lock.locked()
|
assert self.io_lock.locked()
|
||||||
line = pyon.encode(obj)
|
line = pyon.encode(obj)
|
||||||
self.process.stdin.write(line.encode())
|
self.process.stdin.write(line.encode())
|
||||||
|
@@ -118,7 +121,7 @@ class Worker:
|
||||||
if cancellable:
|
if cancellable:
|
||||||
ifs.append(self.closed.wait())
|
ifs.append(self.closed.wait())
|
||||||
fs = yield from asyncio_wait_or_cancel(
|
fs = yield from asyncio_wait_or_cancel(
|
||||||
ifs, timeout=timeout,
|
ifs, timeout=self.send_timeout,
|
||||||
return_when=asyncio.FIRST_COMPLETED)
|
return_when=asyncio.FIRST_COMPLETED)
|
||||||
if all(f.cancelled() for f in fs):
|
if all(f.cancelled() for f in fs):
|
||||||
raise WorkerTimeout("Timeout sending data to worker")
|
raise WorkerTimeout("Timeout sending data to worker")
|
||||||
|
@@ -135,7 +138,7 @@ class Worker:
|
||||||
[self.process.stdout.readline(), self.closed.wait()],
|
[self.process.stdout.readline(), self.closed.wait()],
|
||||||
timeout=timeout, return_when=asyncio.FIRST_COMPLETED)
|
timeout=timeout, return_when=asyncio.FIRST_COMPLETED)
|
||||||
if all(f.cancelled() for f in fs):
|
if all(f.cancelled() for f in fs):
|
||||||
raise WorkerTimeout("Timeout sending data to worker")
|
raise WorkerTimeout("Timeout receiving data from worker")
|
||||||
if self.closed.is_set():
|
if self.closed.is_set():
|
||||||
raise WorkerError("Data transmission to worker cancelled")
|
raise WorkerError("Data transmission to worker cancelled")
|
||||||
line = fs[0].result()
|
line = fs[0].result()
|
||||||
|
@ -168,8 +171,12 @@ class Worker:
|
||||||
func = self.create_watchdog
|
func = self.create_watchdog
|
||||||
elif action == "delete_watchdog":
|
elif action == "delete_watchdog":
|
||||||
func = self.delete_watchdog
|
func = self.delete_watchdog
|
||||||
|
elif action == "register_experiment":
|
||||||
|
func = self.register_experiment
|
||||||
else:
|
else:
|
||||||
func = self.handlers[action]
|
func = self.handlers[action]
|
||||||
|
if getattr(func, "worker_pass_rid", False):
|
||||||
|
func = partial(func, self.rid)
|
||||||
try:
|
try:
|
||||||
data = func(**obj)
|
data = func(**obj)
|
||||||
reply = {"status": "ok", "data": data}
|
reply = {"status": "ok", "data": data}
|
||||||
|
@ -178,7 +185,7 @@ class Worker:
|
||||||
"message": traceback.format_exc()}
|
"message": traceback.format_exc()}
|
||||||
yield from self.io_lock.acquire()
|
yield from self.io_lock.acquire()
|
||||||
try:
|
try:
|
||||||
yield from self._send(reply, self.send_timeout)
|
yield from self._send(reply)
|
||||||
finally:
|
finally:
|
||||||
self.io_lock.release()
|
self.io_lock.release()
|
||||||
|
|
||||||
|
@ -189,7 +196,7 @@ class Worker:
|
||||||
try:
|
try:
|
||||||
yield from self.io_lock.acquire()
|
yield from self.io_lock.acquire()
|
||||||
try:
|
try:
|
||||||
yield from self._send(obj, self.send_timeout)
|
yield from self._send(obj)
|
||||||
finally:
|
finally:
|
||||||
self.io_lock.release()
|
self.io_lock.release()
|
||||||
try:
|
try:
|
||||||
|
@ -202,16 +209,20 @@ class Worker:
|
||||||
return completed
|
return completed
|
||||||
|
|
||||||
@asyncio.coroutine
|
@asyncio.coroutine
|
||||||
def prepare(self, rid, pipeline_name, expid, priority):
|
def build(self, rid, pipeline_name, expid, priority, timeout=15.0):
|
||||||
self.rid = rid
|
self.rid = rid
|
||||||
yield from self._create_process()
|
yield from self._create_process()
|
||||||
yield from self._worker_action(
|
yield from self._worker_action(
|
||||||
{"action": "prepare",
|
{"action": "build",
|
||||||
"rid": rid,
|
"rid": rid,
|
||||||
"pipeline_name": pipeline_name,
|
"pipeline_name": pipeline_name,
|
||||||
"expid": expid,
|
"expid": expid,
|
||||||
"priority": priority},
|
"priority": priority},
|
||||||
self.prepare_timeout)
|
timeout)
|
||||||
|
|
||||||
|
@asyncio.coroutine
|
||||||
|
def prepare(self):
|
||||||
|
yield from self._worker_action({"action": "prepare"})
|
||||||
|
|
||||||
@asyncio.coroutine
|
@asyncio.coroutine
|
||||||
def run(self):
|
def run(self):
|
||||||
|
@ -236,6 +247,18 @@ class Worker:
|
||||||
yield from self._worker_action({"action": "analyze"})
|
yield from self._worker_action({"action": "analyze"})
|
||||||
|
|
||||||
@asyncio.coroutine
|
@asyncio.coroutine
|
||||||
def write_results(self):
|
def write_results(self, timeout=15.0):
|
||||||
yield from self._worker_action({"action": "write_results"},
|
yield from self._worker_action({"action": "write_results"},
|
||||||
self.results_timeout)
|
timeout)
|
||||||
|
|
||||||
|
@asyncio.coroutine
|
||||||
|
def examine(self, file, timeout=20.0):
|
||||||
|
yield from self._create_process()
|
||||||
|
r = dict()
|
||||||
|
def register(class_name, name, arguments):
|
||||||
|
r[class_name] = {"name": name, "arguments": arguments}
|
||||||
|
self.register_experiment = register
|
||||||
|
yield from self._worker_action({"action": "examine",
|
||||||
|
"file": file}, timeout)
|
||||||
|
del self.register_experiment
|
||||||
|
return r
|
||||||
|
|
|
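The hunk above reworks the master-side Worker API: the single prepare step is split into build and prepare, per-call timeouts move into keyword arguments, and a new examine action collects experiment metadata through a temporary register_experiment handler. A rough sketch of the pyon action objects this implies on the worker's stdin, assuming only the field names visible in the hunk (the exact wire framing beyond one encoded object per line is not shown here, and the pipeline name is made up):

# Hypothetical illustration only; field names taken from the hunk above.
from artiq.protocols import pyon

build_request = {
    "action": "build",        # first step, formerly "prepare"
    "rid": 0,
    "pipeline_name": "main",  # made-up pipeline name
    "expid": {"file": "experiment.py", "class_name": None, "arguments": {}},
    "priority": 0,
}
print(pyon.encode(build_request))           # one encoded object per line
print(pyon.encode({"action": "prepare"}))   # new, separate prepare step
print(pyon.encode({"action": "examine", "file": "experiment.py"}))
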
@ -1,76 +1,114 @@
|
||||||
from collections import OrderedDict
|
from collections import OrderedDict
|
||||||
import importlib
|
import importlib
|
||||||
import logging
|
import logging
|
||||||
|
import os
|
||||||
|
import time
|
||||||
|
import re
|
||||||
|
|
||||||
|
import numpy
|
||||||
|
import h5py
|
||||||
|
|
||||||
from artiq.protocols.sync_struct import Notifier
|
from artiq.protocols.sync_struct import Notifier
|
||||||
from artiq.protocols.pc_rpc import Client, BestEffortClient
|
from artiq.protocols.pc_rpc import Client, BestEffortClient
|
||||||
from artiq.master.results import result_dict_to_hdf5
|
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class ResultDB:
|
def get_hdf5_output(start_time, rid, name):
|
||||||
def __init__(self, init_rt_results, update_rt_results):
|
dirname = os.path.join("results",
|
||||||
self.init_rt_results = init_rt_results
|
time.strftime("%Y-%m-%d", start_time),
|
||||||
self.update_rt_results = update_rt_results
|
time.strftime("%H-%M", start_time))
|
||||||
self.rtr_description = dict()
|
filename = "{:09}-{}.h5".format(rid, name)
|
||||||
|
os.makedirs(dirname, exist_ok=True)
|
||||||
|
return h5py.File(os.path.join(dirname, filename), "w")
|
||||||
|
|
||||||
def add_rt_results(self, rtr_description):
|
|
||||||
intr = set(self.rtr_description.keys()).intersection(
|
|
||||||
set(rtr_description.keys()))
|
|
||||||
if intr:
|
|
||||||
raise ValueError("Duplicate realtime results: " + ", ".join(intr))
|
|
||||||
self.rtr_description.update(rtr_description)
|
|
||||||
|
|
||||||
def build(self):
|
def get_last_rid():
|
||||||
realtime_results_set = set()
|
r = -1
|
||||||
for rtr in self.rtr_description.keys():
|
try:
|
||||||
if isinstance(rtr, tuple):
|
day_folders = os.listdir("results")
|
||||||
for e in rtr:
|
except:
|
||||||
realtime_results_set.add(e)
|
|
||||||
else:
|
|
||||||
realtime_results_set.add(rtr)
|
|
||||||
|
|
||||||
self.realtime_data = Notifier({x: [] for x in realtime_results_set})
|
|
||||||
self.data = Notifier(dict())
|
|
||||||
|
|
||||||
self.init_rt_results(self.rtr_description)
|
|
||||||
self.realtime_data.publish = lambda notifier, data: \
|
|
||||||
self.update_rt_results(data)
|
|
||||||
|
|
||||||
def _request(self, name):
|
|
||||||
try:
|
|
||||||
return self.realtime_data[name]
|
|
||||||
except KeyError:
|
|
||||||
try:
|
|
||||||
return self.data[name]
|
|
||||||
except KeyError:
|
|
||||||
self.data[name] = []
|
|
||||||
return self.data[name]
|
|
||||||
|
|
||||||
def request(self, name):
|
|
||||||
r = self._request(name)
|
|
||||||
r.kernel_attr_init = False
|
|
||||||
return r
|
return r
|
||||||
|
day_folders = filter(lambda x: re.fullmatch('\d\d\d\d-\d\d-\d\d', x),
|
||||||
|
day_folders)
|
||||||
|
for df in day_folders:
|
||||||
|
day_path = os.path.join("results", df)
|
||||||
|
try:
|
||||||
|
minute_folders = os.listdir(day_path)
|
||||||
|
except:
|
||||||
|
continue
|
||||||
|
minute_folders = filter(lambda x: re.fullmatch('\d\d-\d\d', x),
|
||||||
|
minute_folders)
|
||||||
|
for mf in minute_folders:
|
||||||
|
minute_path = os.path.join(day_path, mf)
|
||||||
|
try:
|
||||||
|
h5files = os.listdir(minute_path)
|
||||||
|
except:
|
||||||
|
continue
|
||||||
|
for x in h5files:
|
||||||
|
m = re.fullmatch('(\d\d\d\d\d\d\d\d\d)-.*\.h5', x)
|
||||||
|
rid = int(m.group(1))
|
||||||
|
if rid > r:
|
||||||
|
r = rid
|
||||||
|
return r
|
||||||
|
|
||||||
def set(self, name, value):
|
|
||||||
if name in self.realtime_data.read:
|
_type_to_hdf5 = {
|
||||||
self.realtime_data[name] = value
|
int: h5py.h5t.STD_I64BE,
|
||||||
|
float: h5py.h5t.IEEE_F64BE
|
||||||
|
}
|
||||||
|
|
||||||
|
def result_dict_to_hdf5(f, rd):
|
||||||
|
for name, data in rd.items():
|
||||||
|
if isinstance(data, list):
|
||||||
|
el_ty = type(data[0])
|
||||||
|
for d in data:
|
||||||
|
if type(d) != el_ty:
|
||||||
|
raise TypeError("All list elements must have the same"
|
||||||
|
" type for HDF5 output")
|
||||||
|
try:
|
||||||
|
el_ty_h5 = _type_to_hdf5[el_ty]
|
||||||
|
except KeyError:
|
||||||
|
raise TypeError("List element type {} is not supported for"
|
||||||
|
" HDF5 output".format(el_ty))
|
||||||
|
dataset = f.create_dataset(name, (len(data), ), el_ty_h5)
|
||||||
|
dataset[:] = data
|
||||||
|
elif isinstance(data, numpy.ndarray):
|
||||||
|
f.create_dataset(name, data=data)
|
||||||
else:
|
else:
|
||||||
self.data[name] = value
|
ty = type(data)
|
||||||
|
try:
|
||||||
|
ty_h5 = _type_to_hdf5[ty]
|
||||||
|
except KeyError:
|
||||||
|
raise TypeError("Type {} is not supported for HDF5 output"
|
||||||
|
.format(ty))
|
||||||
|
dataset = f.create_dataset(name, (), ty_h5)
|
||||||
|
dataset[()] = data
|
||||||
|
|
||||||
|
|
||||||
|
class ResultDB:
|
||||||
|
def __init__(self):
|
||||||
|
self.rt = Notifier(dict())
|
||||||
|
self.nrt = dict()
|
||||||
|
|
||||||
|
def get(self, key):
|
||||||
|
try:
|
||||||
|
return self.nrt[key]
|
||||||
|
except KeyError:
|
||||||
|
return self.rt[key].read
|
||||||
|
|
||||||
def write_hdf5(self, f):
|
def write_hdf5(self, f):
|
||||||
result_dict_to_hdf5(f, self.realtime_data.read)
|
result_dict_to_hdf5(f, self.rt.read)
|
||||||
result_dict_to_hdf5(f, self.data.read)
|
result_dict_to_hdf5(f, self.nrt)
|
||||||
|
|
||||||
|
|
||||||
def _create_device(desc, dbh):
|
def _create_device(desc, dmgr):
|
||||||
ty = desc["type"]
|
ty = desc["type"]
|
||||||
if ty == "local":
|
if ty == "local":
|
||||||
module = importlib.import_module(desc["module"])
|
module = importlib.import_module(desc["module"])
|
||||||
device_class = getattr(module, desc["class"])
|
device_class = getattr(module, desc["class"])
|
||||||
return device_class(dbh, **desc["arguments"])
|
return device_class(dmgr, **desc["arguments"])
|
||||||
elif ty == "controller":
|
elif ty == "controller":
|
||||||
if desc["best_effort"]:
|
if desc["best_effort"]:
|
||||||
cl = BestEffortClient
|
cl = BestEffortClient
|
||||||
|
@ -81,30 +119,26 @@ def _create_device(desc, dbh):
|
||||||
raise ValueError("Unsupported type in device DB: " + ty)
|
raise ValueError("Unsupported type in device DB: " + ty)
|
||||||
|
|
||||||
|
|
||||||
class DBHub:
|
class DeviceManager:
|
||||||
"""Connects device, parameter and result databases to experiment.
|
"""Handles creation and destruction of local device drivers and controller
|
||||||
Handle device driver creation and destruction.
|
RPC clients."""
|
||||||
"""
|
def __init__(self, ddb, virtual_devices=dict()):
|
||||||
def __init__(self, ddb, pdb, rdb, read_only=False):
|
|
||||||
self.ddb = ddb
|
self.ddb = ddb
|
||||||
|
self.virtual_devices = virtual_devices
|
||||||
self.active_devices = OrderedDict()
|
self.active_devices = OrderedDict()
|
||||||
|
|
||||||
self.get_parameter = pdb.request
|
def get(self, name):
|
||||||
|
"""Get the device driver or controller client corresponding to a
|
||||||
if not read_only:
|
device database entry."""
|
||||||
self.set_parameter = pdb.set
|
if name in self.virtual_devices:
|
||||||
self.add_rt_results = rdb.add_rt_results
|
return self.virtual_devices[name]
|
||||||
self.get_result = rdb.request
|
|
||||||
self.set_result = rdb.set
|
|
||||||
|
|
||||||
def get_device(self, name):
|
|
||||||
if name in self.active_devices:
|
if name in self.active_devices:
|
||||||
return self.active_devices[name]
|
return self.active_devices[name]
|
||||||
else:
|
else:
|
||||||
desc = self.ddb.request(name)
|
desc = self.ddb.get(name)
|
||||||
while isinstance(desc, str):
|
while isinstance(desc, str):
|
||||||
# alias
|
# alias
|
||||||
desc = self.ddb.request(desc)
|
desc = self.ddb.get(desc)
|
||||||
dev = _create_device(desc, self)
|
dev = _create_device(desc, self)
|
||||||
self.active_devices[name] = dev
|
self.active_devices[name] = dev
|
||||||
return dev
|
return dev
|
||||||
|
|
|
@ -3,9 +3,8 @@ import time
|
||||||
|
|
||||||
from artiq.protocols import pyon
|
from artiq.protocols import pyon
|
||||||
from artiq.tools import file_import
|
from artiq.tools import file_import
|
||||||
from artiq.master.worker_db import DBHub, ResultDB
|
from artiq.master.worker_db import DeviceManager, ResultDB, get_hdf5_output
|
||||||
from artiq.master.results import get_hdf5_output
|
from artiq.language.environment import is_experiment
|
||||||
from artiq.language.experiment import is_experiment
|
|
||||||
from artiq.language.core import set_watchdog_factory
|
from artiq.language.core import set_watchdog_factory
|
||||||
|
|
||||||
|
|
||||||
|
@ -45,16 +44,34 @@ def make_parent_action(action, argnames, exception=ParentActionError):
|
||||||
return parent_action
|
return parent_action
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
class LogForwarder:
|
||||||
|
def __init__(self):
|
||||||
|
self.buffer = ""
|
||||||
|
|
||||||
|
to_parent = staticmethod(make_parent_action("log", "message"))
|
||||||
|
|
||||||
|
def write(self, data):
|
||||||
|
self.buffer += data
|
||||||
|
while "\n" in self.buffer:
|
||||||
|
i = self.buffer.index("\n")
|
||||||
|
self.to_parent(self.buffer[:i])
|
||||||
|
self.buffer = self.buffer[i+1:]
|
||||||
|
|
||||||
|
def flush(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
class ParentDDB:
|
class ParentDDB:
|
||||||
request = make_parent_action("req_device", "name", KeyError)
|
get = make_parent_action("get_device", "name", KeyError)
|
||||||
|
|
||||||
|
|
||||||
class ParentPDB:
|
class ParentPDB:
|
||||||
request = make_parent_action("req_parameter", "name", KeyError)
|
get = make_parent_action("get_parameter", "name", KeyError)
|
||||||
set = make_parent_action("set_parameter", "name value")
|
set = make_parent_action("set_parameter", "name value")
|
||||||
|
|
||||||
|
|
||||||
init_rt_results = make_parent_action("init_rt_results", "description")
|
|
||||||
update_rt_results = make_parent_action("update_rt_results", "mod")
|
update_rt_results = make_parent_action("update_rt_results", "mod")
|
||||||
|
|
||||||
|
|
||||||
|
@ -79,18 +96,18 @@ class Scheduler:
|
||||||
pause = staticmethod(make_parent_action("pause", ""))
|
pause = staticmethod(make_parent_action("pause", ""))
|
||||||
|
|
||||||
submit = staticmethod(make_parent_action("scheduler_submit",
|
submit = staticmethod(make_parent_action("scheduler_submit",
|
||||||
"pipeline_name expid priority due_date"))
|
"pipeline_name expid priority due_date flush"))
|
||||||
cancel = staticmethod(make_parent_action("scheduler_cancel", "rid"))
|
cancel = staticmethod(make_parent_action("scheduler_cancel", "rid"))
|
||||||
|
|
||||||
def __init__(self, pipeline_name, expid, priority):
|
def set_run_info(self, pipeline_name, expid, priority):
|
||||||
self.pipeline_name = pipeline_name
|
self.pipeline_name = pipeline_name
|
||||||
self.expid = expid
|
self.expid = expid
|
||||||
self.priority = priority
|
self.priority = priority
|
||||||
|
|
||||||
|
|
||||||
def get_exp(file, exp):
|
def get_exp(file, class_name):
|
||||||
module = file_import(file)
|
module = file_import(file)
|
||||||
if exp is None:
|
if class_name is None:
|
||||||
exps = [v for k, v in module.__dict__.items()
|
exps = [v for k, v in module.__dict__.items()
|
||||||
if is_experiment(v)]
|
if is_experiment(v)]
|
||||||
if len(exps) != 1:
|
if len(exps) != 1:
|
||||||
|
@ -98,11 +115,44 @@ def get_exp(file, exp):
|
||||||
.format(len(exps)))
|
.format(len(exps)))
|
||||||
return exps[0]
|
return exps[0]
|
||||||
else:
|
else:
|
||||||
return getattr(module, exp)
|
return getattr(module, class_name)
|
||||||
|
|
||||||
|
|
||||||
|
register_experiment = make_parent_action("register_experiment",
|
||||||
|
"class_name name arguments")
|
||||||
|
|
||||||
|
|
||||||
|
class DummyDMGR:
|
||||||
|
def get(self, name):
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
class DummyPDB:
|
||||||
|
def get(self, name):
|
||||||
|
return None
|
||||||
|
|
||||||
|
def set(self, name, value):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def examine(dmgr, pdb, rdb, file):
|
||||||
|
module = file_import(file)
|
||||||
|
for class_name, exp_class in module.__dict__.items():
|
||||||
|
if is_experiment(exp_class):
|
||||||
|
if exp_class.__doc__ is None:
|
||||||
|
name = class_name
|
||||||
|
else:
|
||||||
|
name = exp_class.__doc__.splitlines()[0].strip()
|
||||||
|
if name[-1] == ".":
|
||||||
|
name = name[:-1]
|
||||||
|
exp_inst = exp_class(dmgr, pdb, rdb, default_arg_none=True)
|
||||||
|
arguments = [(k, v.describe())
|
||||||
|
for k, v in exp_inst.requested_args.items()]
|
||||||
|
register_experiment(class_name, name, arguments)
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
sys.stdout = sys.stderr
|
sys.stdout = sys.stderr = LogForwarder()
|
||||||
|
|
||||||
start_time = None
|
start_time = None
|
||||||
rid = None
|
rid = None
|
||||||
|
@ -110,26 +160,27 @@ def main():
|
||||||
exp = None
|
exp = None
|
||||||
exp_inst = None
|
exp_inst = None
|
||||||
|
|
||||||
rdb = ResultDB(init_rt_results, update_rt_results)
|
dmgr = DeviceManager(ParentDDB,
|
||||||
dbh = DBHub(ParentDDB, ParentPDB, rdb)
|
virtual_devices={"scheduler": Scheduler()})
|
||||||
|
rdb = ResultDB()
|
||||||
|
rdb.rt.publish = update_rt_results
|
||||||
|
|
||||||
try:
|
try:
|
||||||
while True:
|
while True:
|
||||||
obj = get_object()
|
obj = get_object()
|
||||||
action = obj["action"]
|
action = obj["action"]
|
||||||
if action == "prepare":
|
if action == "build":
|
||||||
start_time = time.localtime()
|
start_time = time.localtime()
|
||||||
rid = obj["rid"]
|
rid = obj["rid"]
|
||||||
pipeline_name = obj["pipeline_name"]
|
|
||||||
expid = obj["expid"]
|
expid = obj["expid"]
|
||||||
priority = obj["priority"]
|
exp = get_exp(expid["file"], expid["class_name"])
|
||||||
exp = get_exp(expid["file"], expid["experiment"])
|
dmgr.virtual_devices["scheduler"].set_run_info(
|
||||||
exp_inst = exp(dbh,
|
obj["pipeline_name"], expid, obj["priority"])
|
||||||
scheduler=Scheduler(pipeline_name,
|
exp_inst = exp(dmgr, ParentPDB, rdb,
|
||||||
expid,
|
**expid["arguments"])
|
||||||
priority),
|
put_object({"action": "completed"})
|
||||||
**expid["arguments"])
|
elif action == "prepare":
|
||||||
rdb.build()
|
exp_inst.prepare()
|
||||||
put_object({"action": "completed"})
|
put_object({"action": "completed"})
|
||||||
elif action == "run":
|
elif action == "run":
|
||||||
exp_inst.run()
|
exp_inst.run()
|
||||||
|
@ -144,10 +195,13 @@ def main():
|
||||||
finally:
|
finally:
|
||||||
f.close()
|
f.close()
|
||||||
put_object({"action": "completed"})
|
put_object({"action": "completed"})
|
||||||
|
elif action == "examine":
|
||||||
|
examine(DummyDMGR(), DummyPDB(), ResultDB(), obj["file"])
|
||||||
|
put_object({"action": "completed"})
|
||||||
elif action == "terminate":
|
elif action == "terminate":
|
||||||
break
|
break
|
||||||
finally:
|
finally:
|
||||||
dbh.close_devices()
|
dmgr.close_devices()
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
main()
|
main()
|
||||||
@@ -20,7 +20,7 @@ class FlatFileDB:
    def save(self):
        pyon.store_file(self.filename, self.data.read)

-   def request(self, name):
+   def get(self, name):
        return self.data.read[name]

    def set(self, name, value):

@@ -36,19 +36,3 @@ class FlatFileDB:
        timestamp = time()
        for hook in self.hooks:
            hook.delete(timestamp, name)
-
-
-class SimpleHistory:
-   def __init__(self, depth):
-       self.depth = depth
-       self.history = Notifier([])
-
-   def set(self, timestamp, name, value):
-       if len(self.history.read) >= self.depth:
-           del self.history[0]
-       self.history.append((timestamp, name, value))
-
-   def delete(self, timestamp, name):
-       if len(self.history.read) >= self.depth:
-           del self.history[0]
-       self.history.append((timestamp, name))
@@ -0,0 +1,45 @@
+import threading
+import logging
+
+
+logger = logging.getLogger(__name__)
+
+
+class FFProxy:
+   """Proxies a target object and runs its methods in the background.
+
+   All method calls to this object are forwarded to the target and executed
+   in a background thread. Method calls return immediately. Exceptions from
+   the target method are turned into warnings. At most one method from the
+   target object may be executed in the background; if a new call is
+   submitted while the previous one is still executing, a warning is printed
+   and the new call is dropped.
+
+   This feature is typically used to wrap slow and non-critical RPCs in
+   experiments.
+   """
+   def __init__(self, target):
+       self.target = target
+       self._thread = None
+
+   def ff_join(self):
+       """Waits until any background method finishes its execution."""
+       if self._thread is not None:
+           self._thread.join()
+
+   def __getattr__(self, k):
+       def run_in_thread(*args, **kwargs):
+           if self._thread is not None and self._thread.is_alive():
+               logger.warning("skipping fire-and-forget call to %r.%s as "
+                              "previous call did not complete",
+                              self.target, k)
+               return
+           def thread_body():
+               try:
+                   getattr(self.target, k)(*args, **kwargs)
+               except:
+                   logger.warning("fire-and-forget call to %r.%s raised an "
+                                  "exception:", self.target, k, exc_info=True)
+           self._thread = threading.Thread(target=thread_body)
+           self._thread.start()
+       return run_in_thread
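The FFProxy docstring above describes the intended use: wrapping slow, non-critical RPCs so experiments are not blocked by them. A minimal usage sketch (the SlowLogger target and its notify method are made up for illustration; the module path is assumed from the class, it is not shown in this diff):

import time
from artiq.protocols.fire_and_forget import FFProxy  # path assumed, not shown here

class SlowLogger:
    def notify(self, message):   # hypothetical slow, non-critical call
        time.sleep(1)
        print("logged:", message)

ff = FFProxy(SlowLogger())
ff.notify("shot 1 done")   # returns immediately, runs in a background thread
ff.notify("shot 2 done")   # dropped with a warning if the first call is still running
ff.ff_join()               # wait for any background call before exiting
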
@@ -22,7 +22,6 @@ import inspect

 from artiq.protocols import pyon
 from artiq.protocols.asyncio_server import AsyncioServer as _AsyncioServer
-from artiq.tools import format_arguments


 logger = logging.getLogger(__name__)

@@ -246,7 +245,8 @@ class AsyncioClient:
    def __getattr__(self, name):
        @asyncio.coroutine
        def proxy(*args, **kwargs):
-           return self.__do_rpc(name, args, kwargs)
+           res = yield from self.__do_rpc(name, args, kwargs)
+           return res
        return proxy

@@ -374,6 +374,16 @@ class BestEffortClient:
        return proxy


+def _format_arguments(arguments):
+   fmtargs = []
+   for k, v in sorted(arguments.items(), key=itemgetter(0)):
+       fmtargs.append(k + "=" + repr(v))
+   if fmtargs:
+       return ", ".join(fmtargs)
+   else:
+       return ""
+
+
 class _PrettyPrintCall:
    def __init__(self, obj):
        self.obj = obj

@@ -382,7 +392,7 @@ class _PrettyPrintCall:
        r = self.obj["name"] + "("
        args = ", ".join([repr(a) for a in self.obj["args"]])
        r += args
-       kwargs = format_arguments(self.obj["kwargs"])
+       kwargs = _format_arguments(self.obj["kwargs"])
        if args and kwargs:
            r += ", "
        r += kwargs

@@ -442,15 +452,18 @@ class Server(_AsyncioServer):
            try:
                if obj["action"] == "get_rpc_method_list":
                    members = inspect.getmembers(target, inspect.ismethod)
-                   methods = {}
+                   doc = {
+                       "docstring": inspect.getdoc(target),
+                       "methods": {}
+                   }
                    for name, method in members:
                        if name.startswith("_"):
                            continue
                        method = getattr(target, name)
                        argspec = inspect.getfullargspec(method)
-                       methods[name] = (dict(argspec.__dict__),
+                       doc["methods"][name] = (dict(argspec.__dict__),
                                        inspect.getdoc(method))
-                   obj = {"status": "ok", "ret": methods}
+                   obj = {"status": "ok", "ret": doc}
                elif obj["action"] == "call":
                    logger.debug("calling %s", _PrettyPrintCall(obj))
                    method = getattr(target, obj["name"])

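The last hunk above changes the get_rpc_method_list reply: instead of a bare method dictionary, the server now returns a wrapper that also carries the target's docstring. A sketch of the resulting "ret" payload with a made-up method name and docstrings (the argspec dictionary is whatever inspect.getfullargspec(...).__dict__ yields):

# Shape of the reply after this change; values are placeholders.
reply = {
    "status": "ok",
    "ret": {
        "docstring": "Example controller.",   # inspect.getdoc(target)
        "methods": {
            "set_frequency": (
                {"args": ["self", "freq"], "varargs": None, "varkw": None,
                 "defaults": None, "kwonlyargs": [], "kwonlydefaults": None,
                 "annotations": {}},           # dict(argspec.__dict__)
                "Set the output frequency.",   # inspect.getdoc(method)
            ),
        },
    },
}
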
@@ -14,7 +14,6 @@ The main rationale for this new custom serializer (instead of using JSON) is
 that JSON does not support Numpy and more generally cannot be extended with
 other data types while keeping a concise syntax. Here we can use the Python
 function call syntax to mark special data types.
-
 """


@@ -25,8 +24,6 @@ import tempfile

 import numpy

-from artiq.language.units import Quantity
-

 _encode_map = {
    type(None): "none",

@@ -39,7 +36,6 @@ _encode_map = {
    list: "list",
    dict: "dict",
    Fraction: "fraction",
-   Quantity: "quantity",
    numpy.ndarray: "nparray"
 }


@@ -110,10 +106,6 @@ class _Encoder:
        return "Fraction({}, {})".format(encode(x.numerator),
                                         encode(x.denominator))
-
-   def encode_quantity(self, x):
-       return "Quantity({}, {})".format(encode(x.amount),
-                                        encode(x.unit))

    def encode_nparray(self, x):
        r = "nparray("
        r += encode(x.shape) + ", "

@@ -147,7 +139,6 @@ _eval_dict = {
    "true": True,

    "Fraction": Fraction,
-   "Quantity": Quantity,
    "nparray": _nparray
 }

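The module docstring above explains why pyon exists (JSON cannot carry Numpy arrays or other extra types concisely), and the hunks drop Quantity support while keeping Fraction and numpy arrays. A small round-trip sketch of the remaining types; only encode and store_file appear in this diff, so the matching decode call is an assumption:

from fractions import Fraction
import numpy
from artiq.protocols import pyon

data = {"ratio": Fraction(1, 3), "trace": numpy.array([1.0, 2.0, 3.0])}
s = pyon.encode(data)       # uses call syntax such as Fraction(...) / nparray(...)
restored = pyon.decode(s)   # decode assumed to mirror encode
assert restored["ratio"] == Fraction(1, 3)
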
@@ -8,11 +8,11 @@ describing each modification made to the structure (*mods*).

 Structures must be PYON serializable and contain only lists, dicts, and
 immutable types. Lists and dicts can be nested arbitrarily.

 """

 import asyncio
 from operator import getitem
+from functools import partial

 from artiq.protocols import pyon
 from artiq.protocols.asyncio_server import AsyncioServer

@@ -22,9 +22,7 @@ _init_string = b"ARTIQ sync_struct\n"


 def process_mod(target, mod):
-   """Apply a *mod* to the target, mutating it.
-
-   """
+   """Apply a *mod* to the target, mutating it."""
    for key in mod["path"]:
        target = getitem(target, key)
    action = mod["action"]

@@ -52,8 +50,8 @@ class Subscriber:
    Multiple functions can be specified in a list for the ``Subscriber``
    to update several local objects simultaneously.
    :param notify_cb: An optional function called every time a mod is received
-       from the publisher. The mod is passed as parameter.
+       from the publisher. The mod is passed as parameter. The function is
+       called after the mod has been processed.
    """
    def __init__(self, notifier_name, target_builder, notify_cb=None):
        self.notifier_name = notifier_name

@@ -133,7 +131,6 @@ class Notifier:

    :param backing_struct: Structure to encapsulate. For convenience, it
        also becomes available as the ``read`` property of the ``Notifier``.
-
    """
    def __init__(self, backing_struct, root=None, path=[]):
        self.read = backing_struct

@@ -149,52 +146,46 @@ class Notifier:
    # All modifications must go through them!

    def append(self, x):
-       """Append to a list.
-
-       """
+       """Append to a list."""
        self._backing_struct.append(x)
        if self.root.publish is not None:
-           self.root.publish(self.root, {"action": "append",
+           self.root.publish({"action": "append",
                               "path": self._path,
                               "x": x})

    def insert(self, i, x):
-       """Insert an element into a list.
-
-       """
+       """Insert an element into a list."""
        self._backing_struct.insert(i, x)
        if self.root.publish is not None:
-           self.root.publish(self.root, {"action": "insert",
+           self.root.publish({"action": "insert",
                               "path": self._path,
                               "i": i, "x": x})

    def pop(self, i=-1):
        """Pop an element from a list. The returned element is not
        encapsulated in a ``Notifier`` and its mutations are no longer
-       tracked.
-
-       """
+       tracked."""
        r = self._backing_struct.pop(i)
        if self.root.publish is not None:
-           self.root.publish(self.root, {"action": "pop",
+           self.root.publish({"action": "pop",
                               "path": self._path,
                               "i": i})
        return r

    def __setitem__(self, key, value):
        self._backing_struct.__setitem__(key, value)
        if self.root.publish is not None:
-           self.root.publish(self.root, {"action": "setitem",
+           self.root.publish({"action": "setitem",
                               "path": self._path,
                               "key": key,
                               "value": value})

    def __delitem__(self, key):
        self._backing_struct.__delitem__(key)
        if self.root.publish is not None:
-           self.root.publish(self.root, {"action": "delitem",
+           self.root.publish({"action": "delitem",
                               "path": self._path,
                               "key": key})

    def __getitem__(self, key):
        item = getitem(self._backing_struct, key)

@@ -208,7 +199,6 @@ class Publisher(AsyncioServer):
    :param notifiers: A dictionary containing the notifiers to associate with
        the ``Publisher``. The keys of the dictionary are the names of the
        notifiers to be used with ``Subscriber``.
-
    """
    def __init__(self, notifiers):
        AsyncioServer.__init__(self)

@@ -217,7 +207,7 @@ class Publisher(AsyncioServer):
        self._notifier_names = {id(v): k for k, v in notifiers.items()}

        for notifier in notifiers.values():
-           notifier.publish = self.publish
+           notifier.publish = partial(self.publish, notifier)

    @asyncio.coroutine
    def _handle_connection_cr(self, reader, writer):

@@ -256,8 +246,8 @@ class Publisher(AsyncioServer):
        finally:
            writer.close()

-   def publish(self, notifier, obj):
-       line = pyon.encode(obj) + "\n"
+   def publish(self, notifier, mod):
+       line = pyon.encode(mod) + "\n"
        line = line.encode()
        notifier_name = self._notifier_names[id(notifier)]
        for recipient in self._recipients[notifier_name]:

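Two related changes appear above: Notifier mutators now call publish with just the mod dictionary, and Publisher pre-binds itself and the notifier with functools.partial so the same call still reaches publish(notifier, mod). A standalone sketch of that wiring, using the mod format shown in __setitem__ (the FakePublisher and FakeNotifier classes are only for illustration):

from functools import partial

class FakePublisher:
    def publish(self, notifier, mod):
        # the notifier argument was bound in advance, mirroring
        # notifier.publish = partial(self.publish, notifier)
        print("broadcast", mod, "from", notifier)

class FakeNotifier:
    pass

pub = FakePublisher()
n = FakeNotifier()
n.publish = partial(pub.publish, n)

# A mod in the format emitted by Notifier.__setitem__ above:
n.publish({"action": "setitem", "path": [], "key": "x", "value": 42})
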
@ -1,6 +1,6 @@
|
||||||
from artiq.py2llvm.module import Module
|
from artiq.py2llvm.module import Module
|
||||||
|
|
||||||
def get_runtime_binary(env, func_def):
|
def get_runtime_binary(runtime, func_def):
|
||||||
module = Module(env)
|
module = Module(runtime)
|
||||||
module.compile_function(func_def, dict())
|
module.compile_function(func_def, dict())
|
||||||
return module.emit_object()
|
return module.emit_object()
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
from pythonparser import ast
|
from pythonparser import ast
|
||||||
|
|
||||||
import llvmlite.ir as ll
|
import llvmlite_or1k.ir as ll
|
||||||
|
|
||||||
from artiq.py2llvm import values, base_types, fractions, lists, iterators
|
from artiq.py2llvm import values, base_types, fractions, lists, iterators
|
||||||
from artiq.py2llvm.tools import is_terminated
|
from artiq.py2llvm.tools import is_terminated
|
||||||
|
@ -39,8 +39,8 @@ _ast_cmps = {
|
||||||
|
|
||||||
|
|
||||||
class Visitor:
|
class Visitor:
|
||||||
def __init__(self, env, ns, builder=None):
|
def __init__(self, runtime, ns, builder=None):
|
||||||
self.env = env
|
self.runtime = runtime
|
||||||
self.ns = ns
|
self.ns = ns
|
||||||
self.builder = builder
|
self.builder = builder
|
||||||
self._break_stack = []
|
self._break_stack = []
|
||||||
|
@ -182,7 +182,7 @@ class Visitor:
|
||||||
self.builder,
|
self.builder,
|
||||||
[self.visit_expression(arg) for arg in node.args])
|
[self.visit_expression(arg) for arg in node.args])
|
||||||
elif fn == "syscall":
|
elif fn == "syscall":
|
||||||
return self.env.build_syscall(
|
return self.runtime.build_syscall(
|
||||||
node.args[0].s,
|
node.args[0].s,
|
||||||
[self.visit_expression(expr) for expr in node.args[1:]],
|
[self.visit_expression(expr) for expr in node.args[1:]],
|
||||||
self.builder)
|
self.builder)
|
||||||
|
@ -420,7 +420,7 @@ class Visitor:
|
||||||
def _break_loop_body(self, target_block):
|
def _break_loop_body(self, target_block):
|
||||||
exception_levels = self._exception_level_stack[-1]
|
exception_levels = self._exception_level_stack[-1]
|
||||||
if exception_levels:
|
if exception_levels:
|
||||||
self.env.build_pop(self.builder, exception_levels)
|
self.runtime.build_pop(self.builder, exception_levels)
|
||||||
self.builder.branch(target_block)
|
self.builder.branch(target_block)
|
||||||
|
|
||||||
def _visit_stmt_Break(self, node):
|
def _visit_stmt_Break(self, node):
|
||||||
|
@ -436,7 +436,7 @@ class Visitor:
|
||||||
val = self.visit_expression(node.value)
|
val = self.visit_expression(node.value)
|
||||||
exception_levels = sum(self._exception_level_stack)
|
exception_levels = sum(self._exception_level_stack)
|
||||||
if exception_levels:
|
if exception_levels:
|
||||||
self.env.build_pop(self.builder, exception_levels)
|
self.runtime.build_pop(self.builder, exception_levels)
|
||||||
if isinstance(val, base_types.VNone):
|
if isinstance(val, base_types.VNone):
|
||||||
self.builder.ret_void()
|
self.builder.ret_void()
|
||||||
else:
|
else:
|
||||||
|
@ -456,11 +456,11 @@ class Visitor:
|
||||||
self.builder.branch(finally_block)
|
self.builder.branch(finally_block)
|
||||||
else:
|
else:
|
||||||
eid = ll.Constant(ll.IntType(32), node.exc.args[0].n)
|
eid = ll.Constant(ll.IntType(32), node.exc.args[0].n)
|
||||||
self.env.build_raise(self.builder, eid)
|
self.runtime.build_raise(self.builder, eid)
|
||||||
|
|
||||||
def _handle_exception(self, function, finally_block,
|
def _handle_exception(self, function, finally_block,
|
||||||
propagate, propagate_eid, handlers):
|
propagate, propagate_eid, handlers):
|
||||||
eid = self.env.build_getid(self.builder)
|
eid = self.runtime.build_getid(self.builder)
|
||||||
self._active_exception_stack.append(
|
self._active_exception_stack.append(
|
||||||
(finally_block, propagate, propagate_eid))
|
(finally_block, propagate, propagate_eid))
|
||||||
self.builder.store(ll.Constant(ll.IntType(1), 1), propagate)
|
self.builder.store(ll.Constant(ll.IntType(1), 1), propagate)
|
||||||
|
@ -509,7 +509,7 @@ class Visitor:
|
||||||
self.builder.store(ll.Constant(ll.IntType(1), 0), propagate)
|
self.builder.store(ll.Constant(ll.IntType(1), 0), propagate)
|
||||||
propagate_eid = self.builder.alloca(ll.IntType(32),
|
propagate_eid = self.builder.alloca(ll.IntType(32),
|
||||||
name="propagate_eid")
|
name="propagate_eid")
|
||||||
exception_occured = self.env.build_catch(self.builder)
|
exception_occured = self.runtime.build_catch(self.builder)
|
||||||
self.builder.cbranch(exception_occured, exc_block, noexc_block)
|
self.builder.cbranch(exception_occured, exc_block, noexc_block)
|
||||||
|
|
||||||
self.builder.position_at_end(noexc_block)
|
self.builder.position_at_end(noexc_block)
|
||||||
|
@ -517,7 +517,7 @@ class Visitor:
|
||||||
self.visit_statements(node.body)
|
self.visit_statements(node.body)
|
||||||
self._exception_level_stack[-1] -= 1
|
self._exception_level_stack[-1] -= 1
|
||||||
if not self._bb_terminated():
|
if not self._bb_terminated():
|
||||||
self.env.build_pop(self.builder, 1)
|
self.runtime.build_pop(self.builder, 1)
|
||||||
self.visit_statements(node.orelse)
|
self.visit_statements(node.orelse)
|
||||||
if not self._bb_terminated():
|
if not self._bb_terminated():
|
||||||
self.builder.branch(finally_block)
|
self.builder.branch(finally_block)
|
||||||
|
@ -534,6 +534,6 @@ class Visitor:
|
||||||
self.builder.load(propagate),
|
self.builder.load(propagate),
|
||||||
propagate_block, merge_block)
|
propagate_block, merge_block)
|
||||||
self.builder.position_at_end(propagate_block)
|
self.builder.position_at_end(propagate_block)
|
||||||
self.env.build_raise(self.builder, self.builder.load(propagate_eid))
|
self.runtime.build_raise(self.builder, self.builder.load(propagate_eid))
|
||||||
self.builder.branch(merge_block)
|
self.builder.branch(merge_block)
|
||||||
self.builder.position_at_end(merge_block)
|
self.builder.position_at_end(merge_block)
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
import llvmlite.ir as ll
|
import llvmlite_or1k.ir as ll
|
||||||
|
|
||||||
from artiq.py2llvm.values import VGeneric
|
from artiq.py2llvm.values import VGeneric
|
||||||
|
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
import inspect
|
import inspect
|
||||||
from pythonparser import parse, ast
|
from pythonparser import parse, ast
|
||||||
|
|
||||||
import llvmlite.ir as ll
|
import llvmlite_or1k.ir as ll
|
||||||
|
|
||||||
from artiq.py2llvm.values import VGeneric, operators
|
from artiq.py2llvm.values import VGeneric, operators
|
||||||
from artiq.py2llvm.base_types import VBool, VInt, VFloat
|
from artiq.py2llvm.base_types import VBool, VInt, VFloat
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
import llvmlite.ir as ll
|
import llvmlite_or1k.ir as ll
|
||||||
|
|
||||||
from artiq.py2llvm.values import VGeneric
|
from artiq.py2llvm.values import VGeneric
|
||||||
from artiq.py2llvm.base_types import VInt, VNone
|
from artiq.py2llvm.base_types import VInt, VNone
|
||||||
|
|
|
@ -1,16 +1,16 @@
|
||||||
import llvmlite.ir as ll
|
import llvmlite_or1k.ir as ll
|
||||||
import llvmlite.binding as llvm
|
import llvmlite_or1k.binding as llvm
|
||||||
|
|
||||||
from artiq.py2llvm import infer_types, ast_body, base_types, fractions, tools
|
from artiq.py2llvm import infer_types, ast_body, base_types, fractions, tools
|
||||||
|
|
||||||
|
|
||||||
class Module:
|
class Module:
|
||||||
def __init__(self, env=None):
|
def __init__(self, runtime=None):
|
||||||
self.llvm_module = ll.Module("main")
|
self.llvm_module = ll.Module("main")
|
||||||
self.env = env
|
self.runtime = runtime
|
||||||
|
|
||||||
if self.env is not None:
|
if self.runtime is not None:
|
||||||
self.env.init_module(self)
|
self.runtime.init_module(self)
|
||||||
fractions.init_module(self)
|
fractions.init_module(self)
|
||||||
|
|
||||||
def finalize(self):
|
def finalize(self):
|
||||||
|
@ -30,10 +30,10 @@ class Module:
|
||||||
|
|
||||||
def emit_object(self):
|
def emit_object(self):
|
||||||
self.finalize()
|
self.finalize()
|
||||||
return self.env.emit_object()
|
return self.runtime.emit_object()
|
||||||
|
|
||||||
def compile_function(self, func_def, param_types):
|
def compile_function(self, func_def, param_types):
|
||||||
ns = infer_types.infer_function_types(self.env, func_def, param_types)
|
ns = infer_types.infer_function_types(self.runtime, func_def, param_types)
|
||||||
retval = ns["return"]
|
retval = ns["return"]
|
||||||
|
|
||||||
function_type = ll.FunctionType(retval.get_llvm_type(),
|
function_type = ll.FunctionType(retval.get_llvm_type(),
|
||||||
|
@ -50,7 +50,7 @@ class Module:
|
||||||
for arg_ast, arg_llvm in zip(func_def.args.args, function.args):
|
for arg_ast, arg_llvm in zip(func_def.args.args, function.args):
|
||||||
ns[arg_ast.arg].auto_store(builder, arg_llvm)
|
ns[arg_ast.arg].auto_store(builder, arg_llvm)
|
||||||
|
|
||||||
visitor = ast_body.Visitor(self.env, ns, builder)
|
visitor = ast_body.Visitor(self.runtime, ns, builder)
|
||||||
visitor.visit_statements(func_def.body)
|
visitor.visit_statements(func_def.body)
|
||||||
|
|
||||||
if not tools.is_terminated(builder.basic_block):
|
if not tools.is_terminated(builder.basic_block):
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
import llvmlite.ir as ll
|
import llvmlite_or1k.ir as ll
|
||||||
|
|
||||||
def is_terminated(basic_block):
|
def is_terminated(basic_block):
|
||||||
return (basic_block.instructions
|
return (basic_block.instructions
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
from types import SimpleNamespace
|
from types import SimpleNamespace
|
||||||
from copy import copy
|
from copy import copy
|
||||||
|
|
||||||
import llvmlite.ir as ll
|
import llvmlite_or1k.ir as ll
|
||||||
|
|
||||||
|
|
||||||
class VGeneric:
|
class VGeneric:
|
||||||
|
|
|
@ -1,29 +1,29 @@
|
||||||
from random import Random
|
from random import Random
|
||||||
|
|
||||||
from artiq.language.core import delay, kernel
|
from artiq.language.core import delay, kernel
|
||||||
from artiq.language.db import *
|
|
||||||
from artiq.language import units
|
from artiq.language import units
|
||||||
from artiq.sim import time
|
from artiq.sim import time
|
||||||
|
|
||||||
|
|
||||||
class Core(AutoDB):
|
class Core:
|
||||||
_level = 0
|
def __init__(self, dmgr):
|
||||||
|
self.ref_period = 1
|
||||||
|
self._level = 0
|
||||||
|
|
||||||
def run(self, k_function, k_args, k_kwargs):
|
def run(self, k_function, k_args, k_kwargs):
|
||||||
Core._level += 1
|
self._level += 1
|
||||||
r = k_function(*k_args, **k_kwargs)
|
r = k_function(*k_args, **k_kwargs)
|
||||||
Core._level -= 1
|
self._level -= 1
|
||||||
if Core._level == 0:
|
if self._level == 0:
|
||||||
print(time.manager.format_timeline())
|
print(time.manager.format_timeline())
|
||||||
return r
|
return r
|
||||||
|
|
||||||
|
|
||||||
class Input(AutoDB):
|
class Input:
|
||||||
class DBKeys:
|
def __init__(self, dmgr, name):
|
||||||
core = Device()
|
self.core = dmgr.get("core")
|
||||||
name = Argument()
|
self.name = name
|
||||||
|
|
||||||
def build(self):
|
|
||||||
self.prng = Random()
|
self.prng = Random()
|
||||||
|
|
||||||
@kernel
|
@kernel
|
||||||
|
@ -40,10 +40,10 @@ class Input(AutoDB):
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
class WaveOutput(AutoDB):
|
class WaveOutput:
|
||||||
class DBKeys:
|
def __init__(self, dmgr, name):
|
||||||
core = Device()
|
self.core = dmgr.get("core")
|
||||||
name = Argument()
|
self.name = name
|
||||||
|
|
||||||
@kernel
|
@kernel
|
||||||
def pulse(self, frequency, duration):
|
def pulse(self, frequency, duration):
|
||||||
|
@ -51,10 +51,10 @@ class WaveOutput(AutoDB):
|
||||||
delay(duration)
|
delay(duration)
|
||||||
|
|
||||||
|
|
||||||
class VoltageOutput(AutoDB):
|
class VoltageOutput:
|
||||||
class DBKeys:
|
def __init__(self, dmgr, name):
|
||||||
core = Device()
|
self.core = dmgr.get("core")
|
||||||
name = Argument()
|
self.name = name
|
||||||
|
|
||||||
@kernel
|
@kernel
|
||||||
def set(self, value):
|
def set(self, value):
|
||||||
|
|
|
@ -30,37 +30,39 @@ class Manager:
|
||||||
self.timeline = []
|
self.timeline = []
|
||||||
|
|
||||||
def enter_sequential(self):
|
def enter_sequential(self):
|
||||||
new_context = SequentialTimeContext(self.get_time())
|
new_context = SequentialTimeContext(self.get_time_mu())
|
||||||
self.stack.append(new_context)
|
self.stack.append(new_context)
|
||||||
|
|
||||||
def enter_parallel(self):
|
def enter_parallel(self):
|
||||||
new_context = ParallelTimeContext(self.get_time())
|
new_context = ParallelTimeContext(self.get_time_mu())
|
||||||
self.stack.append(new_context)
|
self.stack.append(new_context)
|
||||||
|
|
||||||
def exit(self):
|
def exit(self):
|
||||||
old_context = self.stack.pop()
|
old_context = self.stack.pop()
|
||||||
self.take_time(old_context.block_duration)
|
self.take_time(old_context.block_duration)
|
||||||
|
|
||||||
def take_time(self, duration):
|
def take_time_mu(self, duration):
|
||||||
self.stack[-1].take_time(duration)
|
self.stack[-1].take_time(duration)
|
||||||
|
|
||||||
def get_time(self):
|
def get_time_mu(self):
|
||||||
return self.stack[-1].current_time
|
return self.stack[-1].current_time
|
||||||
|
|
||||||
def set_time(self, t):
|
def set_time_mu(self, t):
|
||||||
dt = t - self.get_time()
|
dt = t - self.get_time_mu()
|
||||||
if dt < 0*s:
|
if dt < 0*s:
|
||||||
raise ValueError("Attempted to go back in time")
|
raise ValueError("Attempted to go back in time")
|
||||||
self.take_time(dt)
|
self.take_time(dt)
|
||||||
|
|
||||||
|
take_time = take_time_mu
|
||||||
|
|
||||||
def event(self, description):
|
def event(self, description):
|
||||||
self.timeline.append((self.get_time(), description))
|
self.timeline.append((self.get_time_mu(), description))
|
||||||
|
|
||||||
def format_timeline(self):
|
def format_timeline(self):
|
||||||
r = ""
|
r = ""
|
||||||
prev_time = 0*s
|
prev_time = 0*s
|
||||||
for time, description in sorted(self.timeline, key=itemgetter(0)):
|
for time, description in sorted(self.timeline, key=itemgetter(0)):
|
||||||
r += "@{:10} (+{:10}) ".format(str(time), str(time-prev_time))
|
r += "@{:.9f} (+{:.9f}) ".format(time, time-prev_time)
|
||||||
for item in description:
|
for item in description:
|
||||||
r += "{:16}".format(str(item))
|
r += "{:16}".format(str(item))
|
||||||
r += "\n"
|
r += "\n"
|
||||||
@@ -12,7 +12,7 @@ class TestSplineCoef(unittest.TestCase):
        self.s = coefficients.SplineSource(self.x, self.y, order=4)

    def test_get_segment(self):
-       return list(self.s.get_segment_data(1.5, 3.2, 1/100.))
+       return list(self.s.get_segment_data(start=1.5, stop=3.2, scale=.01))

    def test_synth(self):
        d = self.test_get_segment()

@@ -32,7 +32,7 @@ class TestSplineCoef(unittest.TestCase):

    @unittest.skip("manual/visual test")
    def test_plot(self):
-       import cairoplot
+       import matplotlib.pyplot as plt
        y = self.test_run()
-       x = list(range(len(y)))
-       cairoplot.scatter_plot("plot.png", [x, y])
+       plt.step(np.arange(len(y)), y)
+       plt.show()

@ -0,0 +1,276 @@
|
||||||
|
from math import sqrt
|
||||||
|
|
||||||
|
from artiq.language import *
|
||||||
|
from artiq.test.hardware_testbench import ExperimentCase
|
||||||
|
from artiq.coredevice.runtime_exceptions import RTIOUnderflow
|
||||||
|
from artiq.coredevice import runtime_exceptions
|
||||||
|
|
||||||
|
|
||||||
|
class RTT(EnvExperiment):
|
||||||
|
def build(self):
|
||||||
|
self.attr_device("core")
|
||||||
|
self.attr_device("ttl_inout")
|
||||||
|
|
||||||
|
def set_rtt(self, rtt):
|
||||||
|
self.set_result("rtt", rtt)
|
||||||
|
|
||||||
|
@kernel
|
||||||
|
def run(self):
|
||||||
|
self.ttl_inout.output()
|
||||||
|
delay(1*us)
|
||||||
|
with parallel:
|
||||||
|
# make sure not to send two commands into the same RTIO
|
||||||
|
# channel with the same timestamp
|
||||||
|
self.ttl_inout.gate_rising(5*us)
|
||||||
|
with sequential:
|
||||||
|
delay(1*us)
|
||||||
|
t0 = now_mu()
|
||||||
|
self.ttl_inout.pulse(1*us)
|
||||||
|
self.set_rtt(mu_to_seconds(self.ttl_inout.timestamp_mu() - t0))
|
||||||
|
|
||||||
|
|
||||||
|
class Loopback(EnvExperiment):
|
||||||
|
def build(self):
|
||||||
|
self.attr_device("core")
|
||||||
|
self.attr_device("loop_in")
|
||||||
|
self.attr_device("loop_out")
|
||||||
|
|
||||||
|
def set_rtt(self, rtt):
|
||||||
|
self.set_result("rtt", rtt)
|
||||||
|
|
||||||
|
@kernel
|
||||||
|
def run(self):
|
||||||
|
self.loop_in.input()
|
||||||
|
delay(1*us)
|
||||||
|
with parallel:
|
||||||
|
self.loop_in.gate_rising(2*us)
|
||||||
|
with sequential:
|
||||||
|
delay(1*us)
|
||||||
|
t0 = now_mu()
|
||||||
|
self.loop_out.pulse(1*us)
|
||||||
|
self.set_rtt(mu_to_seconds(self.loop_in.timestamp_mu() - t0))
|
||||||
|
|
||||||
|
|
||||||
|
class ClockGeneratorLoopback(EnvExperiment):
|
||||||
|
def build(self):
|
||||||
|
self.attr_device("core")
|
||||||
|
self.attr_device("loop_clock_in")
|
||||||
|
self.attr_device("loop_clock_out")
|
||||||
|
|
||||||
|
def set_count(self, count):
|
||||||
|
self.set_result("count", count)
|
||||||
|
|
||||||
|
@kernel
|
||||||
|
def run(self):
|
||||||
|
self.loop_clock_in.input()
|
||||||
|
self.loop_clock_out.stop()
|
||||||
|
delay(1*us)
|
||||||
|
with parallel:
|
||||||
|
self.loop_clock_in.gate_rising(10*us)
|
||||||
|
with sequential:
|
||||||
|
delay(200*ns)
|
||||||
|
self.loop_clock_out.set(1*MHz)
|
||||||
|
self.set_count(self.loop_clock_in.count())
|
||||||
|
|
||||||
|
|
||||||
|
class PulseRate(EnvExperiment):
|
||||||
|
def build(self):
|
||||||
|
self.attr_device("core")
|
||||||
|
self.attr_device("loop_out")
|
||||||
|
|
||||||
|
def set_pulse_rate(self, pulse_rate):
|
||||||
|
self.set_result("pulse_rate", pulse_rate)
|
||||||
|
|
||||||
|
@kernel
|
||||||
|
def run(self):
|
||||||
|
dt = seconds_to_mu(1000*ns)
|
||||||
|
while True:
|
||||||
|
try:
|
||||||
|
for i in range(1000):
|
||||||
|
self.loop_out.pulse_mu(dt)
|
||||||
|
delay_mu(dt)
|
||||||
|
except RTIOUnderflow:
|
||||||
|
dt += 1
|
||||||
|
self.core.break_realtime()
|
||||||
|
else:
|
||||||
|
self.set_pulse_rate(mu_to_seconds(2*dt))
|
||||||
|
break
|
||||||
|
|
||||||
|
|
||||||
|
class Watchdog(EnvExperiment):
|
||||||
|
def build(self):
|
||||||
|
self.attr_device("core")
|
||||||
|
|
||||||
|
@kernel
|
||||||
|
def run(self):
|
||||||
|
with watchdog(50*ms):
|
||||||
|
while True:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class LoopbackCount(EnvExperiment):
|
||||||
|
def build(self):
|
||||||
|
self.attr_device("core")
|
||||||
|
self.attr_device("ttl_inout")
|
||||||
|
self.attr_argument("npulses")
|
||||||
|
|
||||||
|
def set_count(self, count):
|
||||||
|
        self.set_result("count", count)

    @kernel
    def run(self):
        self.ttl_inout.output()
        delay(1*us)
        with parallel:
            self.ttl_inout.gate_rising(10*us)
            with sequential:
                for i in range(self.npulses):
                    delay(25*ns)
                    self.ttl_inout.pulse(25*ns)
        self.set_count(self.ttl_inout.count())


class Underflow(EnvExperiment):
    def build(self):
        self.attr_device("core")
        self.attr_device("ttl_out")

    @kernel
    def run(self):
        while True:
            delay(25*ns)
            self.ttl_out.pulse(25*ns)


class SequenceError(EnvExperiment):
    def build(self):
        self.attr_device("core")
        self.attr_device("ttl_out")

    @kernel
    def run(self):
        t = now_mu()
        self.ttl_out.pulse(25*us)
        at_mu(t)
        self.ttl_out.pulse(25*us)


class TimeKeepsRunning(EnvExperiment):
    def build(self):
        self.attr_device("core")

    def set_time_at_start(self, time_at_start):
        self.set_result("time_at_start", time_at_start)

    @kernel
    def run(self):
        self.set_time_at_start(now_mu())


class Handover(EnvExperiment):
    def build(self):
        self.attr_device("core")

    @kernel
    def get_now(self):
        self.time_at_start = now_mu()

    def run(self):
        self.get_now()
        self.set_result("t1", self.time_at_start)
        self.get_now()
        self.set_result("t2", self.time_at_start)


class CoredeviceTest(ExperimentCase):
    def test_rtt(self):
        self.execute(RTT)
        rtt = self.rdb.get("rtt")
        print(rtt)
        self.assertGreater(rtt, 0*ns)
        self.assertLess(rtt, 100*ns)

    def test_loopback(self):
        self.execute(Loopback)
        rtt = self.rdb.get("rtt")
        print(rtt)
        self.assertGreater(rtt, 0*ns)
        self.assertLess(rtt, 50*ns)

    def test_clock_generator_loopback(self):
        self.execute(ClockGeneratorLoopback)
        count = self.rdb.get("count")
        self.assertEqual(count, 10)

    def test_pulse_rate(self):
        self.execute(PulseRate)
        rate = self.rdb.get("pulse_rate")
        print(rate)
        self.assertGreater(rate, 100*ns)
        self.assertLess(rate, 2500*ns)

    def test_loopback_count(self):
        npulses = 2
        r = self.execute(LoopbackCount, npulses=npulses)
        count = self.rdb.get("count")
        self.assertEqual(count, npulses)

    def test_underflow(self):
        with self.assertRaises(runtime_exceptions.RTIOUnderflow):
            self.execute(Underflow)

    def test_sequence_error(self):
        with self.assertRaises(runtime_exceptions.RTIOSequenceError):
            self.execute(SequenceError)

    def test_watchdog(self):
        # watchdog only works on the device
        with self.assertRaises(IOError):
            self.execute(Watchdog)

    def test_time_keeps_running(self):
        self.execute(TimeKeepsRunning)
        t1 = self.rdb.get("time_at_start")
        self.execute(TimeKeepsRunning)
        t2 = self.rdb.get("time_at_start")
        dead_time = mu_to_seconds(t2 - t1, self.dmgr.get("core"))
        print(dead_time)
        self.assertGreater(dead_time, 1*ms)
        self.assertLess(dead_time, 300*ms)

    def test_handover(self):
        self.execute(Handover)
        self.assertEqual(self.rdb.get("t1"), self.rdb.get("t2"))


class RPCTiming(EnvExperiment):
    def build(self):
        self.attr_device("core")
        self.attr_argument("repeats", FreeValue(100))

    def nop(self, x):
        pass

    @kernel
    def bench(self):
        self.ts = [0. for _ in range(self.repeats)]
        for i in range(self.repeats):
            t1 = self.core.get_rtio_counter_mu()
            self.nop(1)
            t2 = self.core.get_rtio_counter_mu()
            self.ts[i] = mu_to_seconds(t2 - t1)

    def run(self):
        self.bench()
        mean = sum(self.ts)/self.repeats
        self.set_result("rpc_time_stddev", sqrt(
            sum([(t - mean)**2 for t in self.ts])/self.repeats))
        self.set_result("rpc_time_mean", mean)


class RPCTest(ExperimentCase):
    def test_rpc_timing(self):
        self.execute(RPCTiming)
        self.assertGreater(self.rdb.get("rpc_time_mean"), 100*ns)
        self.assertLess(self.rdb.get("rpc_time_mean"), 15*ms)
        self.assertLess(self.rdb.get("rpc_time_stddev"), 1*ms)

@@ -0,0 +1,220 @@
from operator import itemgetter
from fractions import Fraction

from artiq import *
from artiq.sim import devices as sim_devices
from artiq.test.hardware_testbench import ExperimentCase


def _run_on_host(k_class, **arguments):
    dmgr = dict()
    dmgr["core"] = sim_devices.Core(dmgr)
    k_inst = k_class(dmgr, **arguments)
    k_inst.run()
    return k_inst


class _Primes(EnvExperiment):
    def build(self):
        self.attr_device("core")
        self.attr_argument("output_list")
        self.attr_argument("maximum")

    @kernel
    def run(self):
        for x in range(1, self.maximum):
            d = 2
            prime = True
            while d*d <= x:
                if x % d == 0:
                    prime = False
                    break
                d += 1
            if prime:
                self.output_list.append(x)


class _Misc(EnvExperiment):
    def build(self):
        self.attr_device("core")

        self.input = 84
        self.al = [1, 2, 3, 4, 5]
        self.list_copy_in = [2*Hz, 10*MHz]

    @kernel
    def run(self):
        self.half_input = self.input//2
        self.decimal_fraction = Fraction("1.2")
        self.acc = 0
        for i in range(len(self.al)):
            self.acc += self.al[i]
        self.list_copy_out = self.list_copy_in


class _PulseLogger(EnvExperiment):
    def build(self):
        self.attr_device("core")
        self.attr_argument("output_list")
        self.attr_argument("name")

    def _append(self, t, l, f):
        if not hasattr(self, "first_timestamp"):
            self.first_timestamp = t
        self.output_list.append((self.name, t-self.first_timestamp, l, f))

    def int_usec(self, mu):
        return round(mu_to_seconds(mu, self.core)*1000000)

    def on(self, t, f):
        self._append(self.int_usec(t), True, f)

    def off(self, t):
        self._append(self.int_usec(t), False, 0)

    @kernel
    def pulse(self, f, duration):
        self.on(now_mu(), f)
        delay(duration)
        self.off(now_mu())


class _Pulses(EnvExperiment):
    def build(self):
        self.attr_device("core")
        self.attr_argument("output_list")

        for name in "a", "b", "c", "d":
            pl = _PulseLogger(*self.dbs(),
                              output_list=self.output_list,
                              name=name)
            setattr(self, name, pl)

    @kernel
    def run(self):
        for i in range(3):
            with parallel:
                with sequential:
                    self.a.pulse(100+i, 20*us)
                    self.b.pulse(200+i, 20*us)
                with sequential:
                    self.c.pulse(300+i, 10*us)
                    self.d.pulse(400+i, 20*us)


class _MyException(Exception):
    pass


class _Exceptions(EnvExperiment):
    def build(self):
        self.attr_device("core")
        self.attr_argument("trace")

    @kernel
    def run(self):
        for i in range(10):
            self.trace.append(i)
            if i == 4:
                try:
                    self.trace.append(10)
                    try:
                        self.trace.append(11)
                        break
                    except:
                        pass
                    else:
                        self.trace.append(12)
                    try:
                        self.trace.append(13)
                    except:
                        pass
                except _MyException:
                    self.trace.append(14)

        for i in range(4):
            try:
                self.trace.append(100)
                if i == 1:
                    raise _MyException
                elif i == 2:
                    raise IndexError
            except (TypeError, IndexError):
                self.trace.append(101)
                raise
            except:
                self.trace.append(102)
            else:
                self.trace.append(103)
            finally:
                self.trace.append(104)


class _RPCExceptions(EnvExperiment):
    def build(self):
        self.attr_device("core")
        self.attr_argument("catch", FreeValue(False))

        self.success = False

    def exception_raiser(self):
        raise _MyException

    @kernel
    def run(self):
        if self.catch:
            self.do_catch()
        else:
            self.do_not_catch()

    @kernel
    def do_not_catch(self):
        self.exception_raiser()

    @kernel
    def do_catch(self):
        try:
            self.exception_raiser()
        except _MyException:
            self.success = True


class HostVsDeviceCase(ExperimentCase):
    def test_primes(self):
        l_device, l_host = [], []
        self.execute(_Primes, maximum=100, output_list=l_device)
        _run_on_host(_Primes, maximum=100, output_list=l_host)
        self.assertEqual(l_device, l_host)

    def test_misc(self):
        for f in self.execute, _run_on_host:
            uut = f(_Misc)
            self.assertEqual(uut.half_input, 42)
            self.assertEqual(uut.decimal_fraction, Fraction("1.2"))
            self.assertEqual(uut.acc, sum(uut.al))
            self.assertEqual(uut.list_copy_in, uut.list_copy_out)

    def test_pulses(self):
        l_device, l_host = [], []
        self.execute(_Pulses, output_list=l_device)
        _run_on_host(_Pulses, output_list=l_host)
        l_host = sorted(l_host, key=itemgetter(1))
        for channel in "a", "b", "c", "d":
            c_device = [x for x in l_device if x[0] == channel]
            c_host = [x for x in l_host if x[0] == channel]
            self.assertEqual(c_device, c_host)

    def test_exceptions(self):
        t_device, t_host = [], []
        with self.assertRaises(IndexError):
            self.execute(_Exceptions, trace=t_device)
        with self.assertRaises(IndexError):
            _run_on_host(_Exceptions, trace=t_host)
        self.assertEqual(t_device, t_host)

    def test_rpc_exceptions(self):
        for f in self.execute, _run_on_host:
            with self.assertRaises(_MyException):
                f(_RPCExceptions, catch=False)
            uut = self.execute(_RPCExceptions, catch=True)
            self.assertTrue(uut.success)

@@ -1,371 +0,0 @@
import unittest
from operator import itemgetter
import os
from fractions import Fraction

from artiq import *
from artiq.language.units import DimensionError
from artiq.coredevice import comm_tcp, core, runtime_exceptions, ttl
from artiq.sim import devices as sim_devices


core_device = os.getenv("ARTIQ_CORE_DEVICE")


def _run_on_device(k_class, **parameters):
    comm = comm_tcp.Comm(host=core_device)
    try:
        coredev = core.Core(comm=comm)
        k_inst = k_class(core=coredev, **parameters)
        k_inst.run()
    finally:
        comm.close()


def _run_on_host(k_class, **parameters):
    coredev = sim_devices.Core()
    k_inst = k_class(core=coredev, **parameters)
    k_inst.run()


class _Primes(AutoDB):
    class DBKeys:
        core = Device()
        output_list = Argument()
        maximum = Argument()

    @kernel
    def run(self):
        for x in range(1, self.maximum):
            d = 2
            prime = True
            while d*d <= x:
                if x % d == 0:
                    prime = False
                    break
                d += 1
            if prime:
                self.output_list.append(x)


class _Misc(AutoDB):
    def build(self):
        self.input = 84
        self.inhomogeneous_units = []
        self.al = [1, 2, 3, 4, 5]
        self.list_copy_in = [2*Hz, 10*MHz]

    @kernel
    def run(self):
        self.half_input = self.input//2
        self.decimal_fraction = Fraction("1.2")
        self.inhomogeneous_units.append(1000*Hz)
        self.inhomogeneous_units.append(10*s)
        self.acc = 0
        for i in range(len(self.al)):
            self.acc += self.al[i]
        self.list_copy_out = self.list_copy_in
        self.unit_comp = [1*MHz for _ in range(3)]

    @kernel
    def dimension_error1(self):
        print(1*Hz + 1*s)

    @kernel
    def dimension_error2(self):
        print(1*Hz < 1*s)

    @kernel
    def dimension_error3(self):
        check_unit(1*Hz, "s")

    @kernel
    def dimension_error4(self):
        delay(10*Hz)


class _PulseLogger(AutoDB):
    class DBKeys:
        core = Device()
        output_list = Argument()
        name = Argument()

    def _append(self, t, l, f):
        if not hasattr(self, "first_timestamp"):
            self.first_timestamp = t
        self.output_list.append((self.name, t-self.first_timestamp, l, f))

    def on(self, t, f):
        self._append(t, True, f)

    def off(self, t):
        self._append(t, False, 0)

    @kernel
    def pulse(self, f, duration):
        self.on(int(now().amount*1000000000), f)
        delay(duration)
        self.off(int(now().amount*1000000000))


class _Pulses(AutoDB):
    class DBKeys:
        core = Device()
        output_list = Argument()

    def build(self):
        for name in "a", "b", "c", "d":
            pl = _PulseLogger(core=self.core,
                              output_list=self.output_list,
                              name=name)
            setattr(self, name, pl)

    @kernel
    def run(self):
        for i in range(3):
            with parallel:
                with sequential:
                    self.a.pulse(100+i, 20*us)
                    self.b.pulse(200+i, 20*us)
                with sequential:
                    self.c.pulse(300+i, 10*us)
                    self.d.pulse(400+i, 20*us)


class _MyException(Exception):
    pass


class _Exceptions(AutoDB):
    class DBKeys:
        core = Device()
        trace = Argument()

    @kernel
    def run(self):
        for i in range(10):
            self.trace.append(i)
            if i == 4:
                try:
                    self.trace.append(10)
                    try:
                        self.trace.append(11)
                        break
                    except:
                        pass
                    else:
                        self.trace.append(12)
                    try:
                        self.trace.append(13)
                    except:
                        pass
                except _MyException:
                    self.trace.append(14)

        for i in range(4):
            try:
                self.trace.append(100)
                if i == 1:
                    raise _MyException
                elif i == 2:
                    raise IndexError
            except (TypeError, IndexError):
                self.trace.append(101)
                raise
            except:
                self.trace.append(102)
            else:
                self.trace.append(103)
            finally:
                self.trace.append(104)


class _RPCExceptions(AutoDB):
    class DBKeys:
        core = Device()

    def build(self):
        self.success = False

    def exception_raiser(self):
        raise _MyException

    @kernel
    def do_not_catch(self):
        self.exception_raiser()

    @kernel
    def catch(self):
        try:
            self.exception_raiser()
        except _MyException:
            self.success = True


class _Watchdog(AutoDB):
    class DBKeys:
        core = Device()

    @kernel
    def run(self):
        with watchdog(50*ms):
            while True:
                pass


@unittest.skipUnless(core_device, "no hardware")
class ExecutionCase(unittest.TestCase):
    def test_primes(self):
        l_device, l_host = [], []
        _run_on_device(_Primes, maximum=100, output_list=l_device)
        _run_on_host(_Primes, maximum=100, output_list=l_host)
        self.assertEqual(l_device, l_host)

    def test_misc(self):
        comm = comm_tcp.Comm(host=core_device)
        try:
            coredev = core.Core(comm=comm)
            uut = _Misc(core=coredev)
            uut.run()
            self.assertEqual(uut.half_input, 42)
            self.assertEqual(uut.decimal_fraction, Fraction("1.2"))
            self.assertEqual(uut.inhomogeneous_units, [1000*Hz, 10*s])
            self.assertEqual(uut.acc, sum(uut.al))
            self.assertEqual(uut.list_copy_in, uut.list_copy_out)
            self.assertEqual(uut.unit_comp, [1*MHz for _ in range(3)])
            with self.assertRaises(DimensionError):
                uut.dimension_error1()
            with self.assertRaises(DimensionError):
                uut.dimension_error2()
            with self.assertRaises(DimensionError):
                uut.dimension_error3()
            with self.assertRaises(DimensionError):
                uut.dimension_error4()
        finally:
            comm.close()

    def test_pulses(self):
        l_device, l_host = [], []
        _run_on_device(_Pulses, output_list=l_device)
        _run_on_host(_Pulses, output_list=l_host)
        l_host = sorted(l_host, key=itemgetter(1))
        for channel in "a", "b", "c", "d":
            c_device = [x for x in l_device if x[0] == channel]
            c_host = [x for x in l_host if x[0] == channel]
            self.assertEqual(c_device, c_host)

    def test_exceptions(self):
        t_device, t_host = [], []
        with self.assertRaises(IndexError):
            _run_on_device(_Exceptions, trace=t_device)
        with self.assertRaises(IndexError):
            _run_on_host(_Exceptions, trace=t_host)
        self.assertEqual(t_device, t_host)

    def test_rpc_exceptions(self):
        comm = comm_tcp.Comm(host=core_device)
        try:
            uut = _RPCExceptions(core=core.Core(comm=comm))
            with self.assertRaises(_MyException):
                uut.do_not_catch()
            uut.catch()
            self.assertTrue(uut.success)
        finally:
            comm.close()

    def test_watchdog(self):
        with self.assertRaises(IOError):
            _run_on_device(_Watchdog)


class _RTIOLoopback(AutoDB):
    class DBKeys:
        core = Device()
        io = Device()
        npulses = Argument()

    def report(self, n):
        self.result = n

    @kernel
    def run(self):
        self.io.output()
        delay(1*us)
        with parallel:
            self.io.gate_rising(10*us)
            with sequential:
                for i in range(self.npulses):
                    delay(25*ns)
                    self.io.pulse(25*ns)
        self.report(self.io.count())


class _RTIOUnderflow(AutoDB):
    class DBKeys:
        core = Device()
        o = Device()

    @kernel
    def run(self):
        while True:
            delay(25*ns)
            self.o.pulse(25*ns)


class _RTIOSequenceError(AutoDB):
    class DBKeys:
        core = Device()
        o = Device()

    @kernel
    def run(self):
        t = now()
        self.o.pulse(25*us)
        at(t)
        self.o.pulse(25*us)


@unittest.skipUnless(core_device, "no hardware")
class RTIOCase(unittest.TestCase):
    # Connect channels 0 and 1 together for this test
    # (C11 and C13 on Papilio Pro)
    def test_loopback(self):
        npulses = 4
        comm = comm_tcp.Comm(host=core_device)
        try:
            coredev = core.Core(comm=comm)
            uut = _RTIOLoopback(
                core=coredev,
                io=ttl.TTLInOut(core=coredev, channel=0),
                npulses=npulses
            )
            uut.run()
            self.assertEqual(uut.result, npulses)
        finally:
            comm.close()

    def test_underflow(self):
        comm = comm_tcp.Comm(host=core_device)
        try:
            coredev = core.Core(comm=comm)
            uut = _RTIOUnderflow(
                core=coredev,
                o=ttl.TTLOut(core=coredev, channel=2)
            )
            with self.assertRaises(runtime_exceptions.RTIOUnderflow):
                uut.run()
        finally:
            comm.close()

    def test_sequence_error(self):
        comm = comm_tcp.Comm(host=core_device)
        try:
            coredev = core.Core(comm=comm)
            uut = _RTIOSequenceError(
                core=coredev,
                o=ttl.TTLOut(core=coredev, channel=2)
            )
            with self.assertRaises(runtime_exceptions.RTIOSequenceError):
                uut.run()
        finally:
            comm.close()

@@ -0,0 +1,58 @@
import os
import sys
import unittest
import logging

from artiq.language import *
from artiq.protocols.file_db import FlatFileDB
from artiq.master.worker_db import DeviceManager, ResultDB
from artiq.frontend.artiq_run import DummyScheduler


artiq_root = os.getenv("ARTIQ_ROOT")
logger = logging.getLogger(__name__)


def get_from_ddb(*path, default="skip"):
    if not artiq_root:
        raise unittest.SkipTest("no ARTIQ_ROOT")
    v = FlatFileDB(os.path.join(artiq_root, "ddb.pyon")).data
    try:
        for p in path:
            v = v[p]
        return v.read
    except KeyError:
        if default == "skip":
            raise unittest.SkipTest("ddb path {} not found".format(path))
        else:
            return default


@unittest.skipUnless(artiq_root, "no ARTIQ_ROOT")
class ExperimentCase(unittest.TestCase):
    def setUp(self):
        self.ddb = FlatFileDB(os.path.join(artiq_root, "ddb.pyon"))
        self.dmgr = DeviceManager(self.ddb,
            virtual_devices={"scheduler": DummyScheduler()})
        self.pdb = FlatFileDB(os.path.join(artiq_root, "pdb.pyon"))
        self.rdb = ResultDB()

    def execute(self, cls, **kwargs):
        expid = {
            "file": sys.modules[cls.__module__].__file__,
            "class_name": cls.__name__,
            "arguments": kwargs
        }
        self.dmgr.virtual_devices["scheduler"].expid = expid
        try:
            try:
                exp = cls(self.dmgr, self.pdb, self.rdb, **kwargs)
            except KeyError as e:
                # skip if ddb does not match requirements
                raise unittest.SkipTest(*e.args)
            exp.prepare()
            exp.run()
            exp.analyze()
            return exp
        finally:
            self.dmgr.close_devices()

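As an illustration of the testbench above, a minimal sketch of a test module built on ExperimentCase; the experiment and test names are hypothetical and it assumes a populated ddb.pyon/pdb.pyon under ARTIQ_ROOT:

from artiq import *
from artiq.test.hardware_testbench import ExperimentCase


class _Idle(EnvExperiment):
    # hypothetical experiment used only for this sketch
    def build(self):
        self.attr_device("core")

    def run(self):
        pass


class IdleCase(ExperimentCase):
    def test_idle(self):
        # execute() builds, prepares, runs and analyzes the experiment,
        # and the testbench skips the test when the device db lacks the
        # required entries
        self.execute(_Idle)
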
@@ -1,34 +1,28 @@
 import unittest
-import os

 from artiq.devices.lda.driver import Lda, Ldasim
 from artiq.language.units import dB
+from artiq.test.hardware_testbench import get_from_ddb

-lda_serial = os.getenv("ARTIQ_LDA_SERIAL")


 class GenericLdaTest:
     def test_attenuation(self):
-        step = self.cont.get_att_step_size().amount
-        max = self.cont.get_att_max().amount
-        test_vector = [i*step*dB for i in range(0, int(max*int(1/step)+1))]
+        step = self.cont.get_att_step_size()
+        attmax = self.cont.get_att_max()
+        test_vector = [i*step*dB for i in range(0, int(attmax*int(1/step)+1))]
         for i in test_vector:
             with self.subTest(i=i):
                 self.cont.set_attenuation(i)
                 self.assertEqual(i, self.cont.get_attenuation())


-@unittest.skipUnless(lda_serial, "no hardware")
 class TestLda(GenericLdaTest, unittest.TestCase):
     def setUp(self):
-        product = os.getenv("ARTIQ_LDA_PRODUCT")
-        self.cont = Lda(serial=lda_serial, product=product)
+        lda_serial = get_from_ddb("lda", "device")
+        lda_product = get_from_ddb("lda", "product")
+        self.cont = Lda(serial=lda_serial, product=lda_product)


 class TestLdaSim(GenericLdaTest, unittest.TestCase):
     def setUp(self):
         self.cont = Ldasim()

-if __name__ == "__main__":
-    unittest.main()

@@ -0,0 +1,31 @@
import unittest

from artiq.devices.novatech409b.driver import Novatech409B
from artiq.test.hardware_testbench import get_from_ddb


class GenericNovatech409BTest:
    def test_parameters_readback(self):
        # write sample data and read it back
        for i in range(4):
            self.driver.set_freq(i, 1e6)
            self.driver.set_phase(i, 0.5)
            self.driver.set_gain(i, 0.25)
        result = self.driver.get_status()

        # check for expected status message; ignore all but first 23 bytes
        # compare with previous result extracted from Novatech
        for i in range(4):
            r = result[i]
            self.assertEqual(r[0:23], "00989680 2000 01F5 0000")


class TestNovatech409B(GenericNovatech409BTest, unittest.TestCase):
    def setUp(self):
        novatech409b_device = get_from_ddb("novatech409b", "device")
        self.driver = Novatech409B(novatech409b_device)


class TestNovatech409BSim(GenericNovatech409BTest, unittest.TestCase):
    def setUp(self):
        self.driver = Novatech409B(None)

@@ -6,7 +6,7 @@ import time

 import numpy as np

-from artiq.protocols import pc_rpc
+from artiq.protocols import pc_rpc, fire_and_forget


 test_address = "::1"

@@ -73,13 +73,29 @@ class RPCCase(unittest.TestCase):
         remote.close_rpc()

     def _loop_asyncio_echo(self):
-        loop = asyncio.get_event_loop()
-        loop.run_until_complete(self._asyncio_echo())
+        loop = asyncio.new_event_loop()
+        asyncio.set_event_loop(loop)
+        try:
+            loop.run_until_complete(self._asyncio_echo())
+        finally:
+            loop.close()

     def test_asyncio_echo(self):
         self._run_server_and_test(self._loop_asyncio_echo)


+class FireAndForgetCase(unittest.TestCase):
+    def _set_ok(self):
+        self.ok = True
+
+    def test_fire_and_forget(self):
+        self.ok = False
+        p = fire_and_forget.FFProxy(self)
+        p._set_ok()
+        p.ff_join()
+        self.assertTrue(self.ok)
+
+
 class Echo:
     def __init__(self):
         self.terminate_notify = asyncio.Semaphore(0)

@@ -96,7 +112,8 @@ class Echo:


 def run_server():
-    loop = asyncio.get_event_loop()
+    loop = asyncio.new_event_loop()
+    asyncio.set_event_loop(loop)
     try:
         echo = Echo()
         server = pc_rpc.Server({"test": echo})

@@ -5,7 +5,7 @@ from fractions import Fraction
 from ctypes import CFUNCTYPE, c_int, c_int32, c_int64, c_double
 import struct

-import llvmlite.binding as llvm
+import llvmlite_or1k.binding as llvm

 from artiq.language.core import int64
 from artiq.py2llvm.infer_types import infer_function_types

@@ -0,0 +1,169 @@
import unittest
import asyncio
import sys
from time import time, sleep

from artiq import *
from artiq.master.scheduler import Scheduler


class EmptyExperiment(EnvExperiment):
    def build(self):
        pass

    def run(self):
        pass


class BackgroundExperiment(EnvExperiment):
    def build(self):
        self.attr_device("scheduler")

    def run(self):
        while True:
            self.scheduler.pause()
            sleep(0.2)


def _get_expid(name):
    return {
        "file": sys.modules[__name__].__file__,
        "class_name": name,
        "arguments": dict()
    }


def _get_basic_steps(rid, expid, priority=0, flush=False):
    return [
        {"action": "setitem", "key": rid, "value":
            {"pipeline": "main", "status": "pending", "priority": priority,
             "expid": expid, "due_date": None, "flush": flush},
         "path": []},
        {"action": "setitem", "key": "status", "value": "preparing",
         "path": [rid]},
        {"action": "setitem", "key": "status", "value": "prepare_done",
         "path": [rid]},
        {"action": "setitem", "key": "status", "value": "running",
         "path": [rid]},
        {"action": "setitem", "key": "status", "value": "run_done",
         "path": [rid]},
        {"action": "setitem", "key": "status", "value": "analyzing",
         "path": [rid]},
        {"action": "setitem", "key": "status", "value": "analyze_done",
         "path": [rid]},
        {"action": "delitem", "key": rid, "path": []}
    ]


_handlers = {
    "init_rt_results": lambda description: None
}


class SchedulerCase(unittest.TestCase):
    def setUp(self):
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)

    def test_steps(self):
        loop = self.loop
        scheduler = Scheduler(0, _handlers)
        expid = _get_expid("EmptyExperiment")

        expect = _get_basic_steps(1, expid)
        done = asyncio.Event()
        expect_idx = 0
        def notify(mod):
            nonlocal expect_idx
            self.assertEqual(mod, expect[expect_idx])
            expect_idx += 1
            if expect_idx >= len(expect):
                done.set()
        scheduler.notifier.publish = notify

        scheduler.start()

        # Verify that a timed experiment far in the future does not
        # get run, even if it has high priority.
        late = time() + 100000
        expect.insert(0,
            {"action": "setitem", "key": 0, "value":
                {"pipeline": "main", "status": "pending", "priority": 99,
                 "expid": expid, "due_date": late, "flush": False},
             "path": []})
        scheduler.submit("main", expid, 99, late, False)

        # This one (RID 1) gets run instead.
        scheduler.submit("main", expid, 0, None, False)

        loop.run_until_complete(done.wait())
        scheduler.notifier.publish = None
        loop.run_until_complete(scheduler.stop())

    def test_pause(self):
        loop = self.loop
        scheduler = Scheduler(0, _handlers)
        expid_bg = _get_expid("BackgroundExperiment")
        expid = _get_expid("EmptyExperiment")

        expect = _get_basic_steps(1, expid)
        background_running = asyncio.Event()
        done = asyncio.Event()
        expect_idx = 0
        def notify(mod):
            nonlocal expect_idx
            if mod == {"path": [0],
                       "value": "running",
                       "key": "status",
                       "action": "setitem"}:
                background_running.set()
            if mod["path"] == [1] or (mod["path"] == [] and mod["key"] == 1):
                self.assertEqual(mod, expect[expect_idx])
                expect_idx += 1
                if expect_idx >= len(expect):
                    done.set()
        scheduler.notifier.publish = notify

        scheduler.start()
        scheduler.submit("main", expid_bg, -99, None, False)
        loop.run_until_complete(background_running.wait())
        scheduler.submit("main", expid, 0, None, False)
        loop.run_until_complete(done.wait())
        loop.run_until_complete(scheduler.stop())

    def test_flush(self):
        loop = self.loop
        scheduler = Scheduler(0, _handlers)
        expid = _get_expid("EmptyExperiment")

        expect = _get_basic_steps(1, expid, 1, True)
        expect.insert(1, {"key": "status",
                          "path": [1],
                          "value": "flushing",
                          "action": "setitem"})
        first_preparing = asyncio.Event()
        done = asyncio.Event()
        expect_idx = 0
        def notify(mod):
            nonlocal expect_idx
            if mod == {"path": [0],
                       "value": "preparing",
                       "key": "status",
                       "action": "setitem"}:
                first_preparing.set()
            if mod["path"] == [1] or (mod["path"] == [] and mod["key"] == 1):
                self.assertEqual(mod, expect[expect_idx])
                expect_idx += 1
                if expect_idx >= len(expect):
                    done.set()
        scheduler.notifier.publish = notify

        scheduler.start()
        scheduler.submit("main", expid, 0, None, False)
        loop.run_until_complete(first_preparing.wait())
        scheduler.submit("main", expid, 1, None, True)
        loop.run_until_complete(done.wait())
        loop.run_until_complete(scheduler.stop())

    def tearDown(self):
        self.loop.close()

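For reference, the status sequence that _get_basic_steps above encodes, i.e. what the scheduler's notifier publishes for a run as it moves through the pipeline (written out here only as a reading aid):

# Lifecycle observed through the notifier in the tests above;
# after "analyze_done" the RID entry is deleted from the schedule.
EXPECTED_STATUSES = ("pending", "preparing", "prepare_done",
                     "running", "run_done",
                     "analyzing", "analyze_done")
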
@@ -0,0 +1,82 @@
import unittest
import asyncio
import numpy as np

from artiq.protocols import sync_struct

test_address = "::1"
test_port = 7777


@asyncio.coroutine
def write_test_data(test_dict):
    test_values = [5, 2.1, None, True, False,
                   {"a": 5, 2: np.linspace(0, 10, 1)},
                   (4, 5), (10,), "ab\nx\"'"]
    for i in range(10):
        test_dict[str(i)] = i
    for key, value in enumerate(test_values):
        test_dict[key] = value
    test_dict[1.5] = 1.5
    test_dict["array"] = []
    test_dict["array"].append(42)
    test_dict["array"].insert(1, 1)
    test_dict[100] = 0
    test_dict[100] = 1
    test_dict[101] = 1
    test_dict.pop(101)
    test_dict[102] = 1
    del test_dict[102]
    test_dict["finished"] = True


@asyncio.coroutine
def start_server(publisher_future, test_dict_future):
    test_dict = sync_struct.Notifier(dict())
    publisher = sync_struct.Publisher(
        {"test": test_dict})
    yield from publisher.start(test_address, test_port)
    publisher_future.set_result(publisher)
    test_dict_future.set_result(test_dict)


class SyncStructCase(unittest.TestCase):
    def init_test_dict(self, init):
        self.test_dict = init
        return init

    def notify(self, mod):
        if ((mod["action"] == "init" and "finished" in mod["struct"])
                or (mod["action"] == "setitem" and mod["key"] == "finished")):
            self.receiving_done.set()

    def setUp(self):
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)

    def test_recv(self):
        loop = self.loop
        self.receiving_done = asyncio.Event()
        publisher = asyncio.Future()
        test_dict = asyncio.Future()
        asyncio.async(start_server(publisher, test_dict))
        loop.run_until_complete(publisher)
        loop.run_until_complete(test_dict)

        self.publisher = publisher.result()
        test_dict = test_dict.result()
        test_vector = dict()
        loop.run_until_complete(write_test_data(test_vector))

        asyncio.async(write_test_data(test_dict))
        self.subscriber = sync_struct.Subscriber("test", self.init_test_dict,
                                                 self.notify)
        loop.run_until_complete(self.subscriber.connect(test_address,
                                                        test_port))
        loop.run_until_complete(self.receiving_done.wait())
        self.assertEqual(self.test_dict, test_vector)
        self.loop.run_until_complete(self.subscriber.close())
        self.loop.run_until_complete(self.publisher.stop())

    def tearDown(self):
        self.loop.close()

@@ -1,9 +1,9 @@
 import unittest
-import os
 import time

 from artiq.devices.thorlabs_tcube.driver import Tdc, Tpz, TdcSim, TpzSim
 from artiq.language.units import V
+from artiq.test.hardware_testbench import get_from_ddb


 class GenericTdcTest:

@@ -88,7 +88,7 @@ class GenericTpzTest:

     def test_ouput_volts(self):
         for voltage in 5*V, 10*V, 15*V, \
-                round(self.cont.get_tpz_io_settings()[0].amount)*V:
+                round(self.cont.get_tpz_io_settings()[0])*V:
             with self.subTest(voltage=voltage):
                 test_vector = voltage
                 self.cont.set_output_volts(test_vector)

@@ -131,12 +131,9 @@ class GenericTpzTest:
         self.assertEqual(test_vector, self.cont.get_tpz_io_settings())


-tdc_serial = os.getenv("ARTIQ_TDC_SERIAL")
-
-
-@unittest.skipUnless(tdc_serial, "no hardware")
 class TestTdc(unittest.TestCase, GenericTdcTest):
     def setUp(self):
+        tdc_serial = get_from_ddb("tdc", "device")
         self.cont = Tdc(serial_dev=tdc_serial)


@@ -145,12 +142,9 @@ class TestTdcSim(unittest.TestCase, GenericTdcTest):
         self.cont = TdcSim()


-tpz_serial = os.getenv("ARTIQ_TPZ_SERIAL")
-
-
-@unittest.skipUnless(tpz_serial, "no hardware")
 class TestTpz(unittest.TestCase, GenericTpzTest):
     def setUp(self):
+        tpz_serial = get_from_ddb("tpz", "device")
         self.cont = Tpz(serial_dev=tpz_serial)

@@ -6,28 +6,20 @@ from artiq.coredevice import comm_dummy, core
 from artiq.transforms.unparse import unparse


-# Original code before inline:
-#
-# n = time_to_cycles(1.2345*ns)
-# ftw = self.dds.frequency_to_ftw(345*MHz)
-# f = self.dds.ftw_to_frequency(ftw)
-# phi = 1000*cycles_to_time(n)*f
-# do_someting(int(phi))
-#
 optimize_in = """

 def run():
-    dds_sysclk = Quantity(Fraction(1000000000, 1), 'Hz')
-    n = time_to_cycles((1.2345 * Quantity(Fraction(1, 1000000000), 's')))
+    dds_sysclk = Fraction(1000000000, 1)
+    n = seconds_to_mu((1.2345 * Fraction(1, 1000000000)))
     with sequential:
-        frequency = (345 * Quantity(Fraction(1000000, 1), 'Hz'))
+        frequency = 345 * Fraction(1000000, 1)
         frequency_to_ftw_return = int((((2 ** 32) * frequency) / dds_sysclk))
     ftw = frequency_to_ftw_return
     with sequential:
         ftw2 = ftw
         ftw_to_frequency_return = ((ftw2 * dds_sysclk) / (2 ** 32))
     f = ftw_to_frequency_return
-    phi = ((1000 * cycles_to_time(n)) * f)
+    phi = ((1000 * mu_to_seconds(n)) * f)
     do_something(int(phi))
 """

@@ -44,7 +36,9 @@ def run():

 class OptimizeCase(unittest.TestCase):
     def test_optimize(self):
-        coredev = core.Core(comm=comm_dummy.Comm(), ref_period=1*ns)
+        dmgr = dict()
+        dmgr["comm"] = comm_dummy.Comm(dmgr)
+        coredev = core.Core(dmgr, ref_period=1*ns)
         func_def = ast.parse(optimize_in).body[0]
         coredev.transform_stack(func_def, dict(), dict())
         self.assertEqual(unparse(func_def), optimize_out)

@@ -7,20 +7,26 @@ from artiq import *
 from artiq.master.worker import *


-class WatchdogNoTimeout(Experiment, AutoDB):
+class WatchdogNoTimeout(EnvExperiment):
+    def build(self):
+        pass
+
     def run(self):
         for i in range(10):
             with watchdog(0.5*s):
                 sleep(0.1)


-class WatchdogTimeout(Experiment, AutoDB):
+class WatchdogTimeout(EnvExperiment):
+    def build(self):
+        pass
+
     def run(self):
         with watchdog(0.1*s):
             sleep(100.0)


-class WatchdogTimeoutInBuild(Experiment, AutoDB):
+class WatchdogTimeoutInBuild(EnvExperiment):
     def build(self):
         with watchdog(0.1*s):
             sleep(100.0)

@@ -31,32 +37,33 @@ class WatchdogTimeoutInBuild(Experiment, AutoDB):

 @asyncio.coroutine
 def _call_worker(worker, expid):
-    yield from worker.prepare(0, "main", expid, 0)
     try:
+        yield from worker.build(0, "main", expid, 0)
+        yield from worker.prepare()
         yield from worker.run()
         yield from worker.analyze()
     finally:
         yield from worker.close()


-def _run_experiment(experiment):
+def _run_experiment(class_name):
     expid = {
         "file": sys.modules[__name__].__file__,
-        "experiment": experiment,
+        "class_name": class_name,
         "arguments": dict()
     }
-    handlers = {
-        "init_rt_results": lambda description: None
-    }
-
-    worker = Worker(handlers)
     loop = asyncio.get_event_loop()
+    worker = Worker()
     loop.run_until_complete(_call_worker(worker, expid))


 class WatchdogCase(unittest.TestCase):
+    def setUp(self):
+        self.loop = asyncio.new_event_loop()
+        asyncio.set_event_loop(self.loop)
+
     def test_watchdog_no_timeout(self):
         _run_experiment("WatchdogNoTimeout")

     def test_watchdog_timeout(self):
         with self.assertRaises(WorkerWatchdogTimeout):

@@ -65,3 +72,6 @@ class WatchdogCase(unittest.TestCase):
     def test_watchdog_timeout_in_build(self):
         with self.assertRaises(WorkerWatchdogTimeout):
             _run_experiment("WatchdogTimeoutInBuild")
+
+    def tearDown(self):
+        self.loop.close()

@@ -7,7 +7,7 @@ import asyncio
 import time
 import os.path

-from artiq.language.experiment import is_experiment
+from artiq.language.environment import is_experiment
 from artiq.protocols import pyon


@@ -18,15 +18,6 @@ def parse_arguments(arguments):
         d[name] = pyon.decode(value)
     return d

-def format_arguments(arguments):
-    fmtargs = []
-    for k, v in sorted(arguments.items(), key=itemgetter(0)):
-        fmtargs.append(k + "=" + repr(v))
-    if fmtargs:
-        return ", ".join(fmtargs)
-    else:
-        return ""
-

 def file_import(filename):
     linecache.checkcache(filename)

@@ -90,12 +81,21 @@ def asyncio_process_wait_timeout(process, timeout):
     # causes a futures.InvalidStateError inside asyncio if and when the
     # process terminates after the timeout.
     # Work around this problem.
-    end_time = time.monotonic() + timeout
+    @asyncio.coroutine
+    def process_wait_returncode_timeout():
+        while True:
+            if process.returncode is not None:
+                break
+            yield from asyncio.sleep(0.1)
+    yield from asyncio.wait_for(process_wait_returncode_timeout(),
+                                timeout=timeout)
+
+
+@asyncio.coroutine
+def asyncio_process_wait(process):
     r = True
     while r:
-        r = yield from asyncio.wait_for(
-            process.stdout.read(1024),
-            timeout=end_time - time.monotonic())
+        f, p = yield from asyncio.wait([process.stdout.read(1024)])
+        r = f.pop().result()


 @asyncio.coroutine

@@ -119,3 +119,42 @@ def asyncio_queue_peek(q):
         return q._queue[0]
     else:
         raise asyncio.QueueEmpty
+
+
+class TaskObject:
+    def start(self):
+        self.task = asyncio.async(self._do())
+
+    @asyncio.coroutine
+    def stop(self):
+        self.task.cancel()
+        yield from asyncio.wait([self.task])
+        del self.task
+
+    @asyncio.coroutine
+    def _do(self):
+        raise NotImplementedError
+
+
+class WaitSet:
+    def __init__(self):
+        self._s = set()
+        self._ev = asyncio.Event()
+
+    def _update_ev(self):
+        if self._s:
+            self._ev.clear()
+        else:
+            self._ev.set()
+
+    def add(self, e):
+        self._s.add(e)
+        self._update_ev()
+
+    def discard(self, e):
+        self._s.discard(e)
+        self._update_ev()
+
+    @asyncio.coroutine
+    def wait_empty(self):
+        yield from self._ev.wait()

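A minimal sketch (hypothetical subclass name, for illustration only) of how the TaskObject helper added above is meant to be subclassed: override _do() with a coroutine, then drive it with start() and stop():

import asyncio


class Heartbeat(TaskObject):
    # hypothetical subclass used only for this sketch
    @asyncio.coroutine
    def _do(self):
        while True:
            print("alive")
            yield from asyncio.sleep(1)

# usage sketch:
#   hb = Heartbeat()
#   hb.start()                                  # schedules _do() on the event loop
#   loop.run_until_complete(hb.stop())          # cancels the task and waits for it
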
@@ -28,13 +28,14 @@ def _get_duration(stmt):
         return -1
     elif isinstance(stmt, ast.Call):
         name = stmt.func.id
-        if name == "delay":
+        assert(name != "delay")
+        if name == "delay_mu":
             try:
                 da = eval_constant(stmt.args[0])
             except NotConstant:
                 da = -1
             return da
-        elif name == "at":
+        elif name == "at_mu":
             return -1
         else:
             return 0

@@ -69,7 +70,7 @@ def _interleave_timelines(timelines):
             ref_stmt = stmt.stmt
             delay_stmt = ast.copy_location(
                 ast.Expr(ast.Call(
-                    func=ast.Name("delay", ast.Load()),
+                    func=ast.Name("delay_mu", ast.Load()),
                     args=[value_to_ast(dt)],
                     keywords=[], starargs=[], kwargs=[])),
                 ref_stmt)

@@ -1,15 +1,15 @@
 """
-This transform implements time management functions (delay/now/at)
+This transform implements time management functions (delay_mu/now_mu/at_mu)
 using an accumulator 'now' and simple replacement rules:

-    delay(t) -> now += t
-    now() -> now
-    at(t) -> now = t
+    delay_mu(t) -> now += t
+    now_mu() -> now
+    at_mu(t) -> now = t

-Time parameters must be quantized to integers before running this transform.
+The function delay(), that uses seconds, must be lowered to delay_mu() before
+invoking this transform.
 The accumulator is initialized to an int64 value at the beginning of the
 output function.
 """

 import ast

@@ -17,7 +17,7 @@ import ast

 class _TimeLowerer(ast.NodeTransformer):
     def visit_Call(self, node):
-        if node.func.id == "now":
+        if node.func.id == "now_mu":
             return ast.copy_location(ast.Name("now", ast.Load()), node)
         else:
             self.generic_visit(node)

@@ -27,13 +27,13 @@ class _TimeLowerer(ast.NodeTransformer):
         r = node
         if isinstance(node.value, ast.Call):
             funcname = node.value.func.id
-            if funcname == "delay":
+            if funcname == "delay_mu":
                 r = ast.copy_location(
                     ast.AugAssign(target=ast.Name("now", ast.Store()),
                                   op=ast.Add(),
                                   value=node.value.args[0]),
                     node)
-            elif funcname == "at":
+            elif funcname == "at_mu":
                 r = ast.copy_location(
                     ast.Assign(targets=[ast.Name("now", ast.Store())],
                                value=node.value.args[0]),

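To make the replacement rules in the docstring above concrete, a hand-expanded sketch of what the lowering does to a short kernel body (the literal values are arbitrary; time is assumed to already be expressed in machine units):

from artiq.language.core import int64

t0 = 1000
now = int64(0)   # accumulator inserted at the beginning of the function
now = t0         # was: at_mu(t0)
now += 64        # was: delay_mu(64)
x = now          # was: x = now_mu()
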
@ -1,190 +0,0 @@
|
||||||
import ast
|
|
||||||
from collections import defaultdict
|
|
||||||
from copy import copy
|
|
||||||
|
|
||||||
from artiq.language import units
|
|
||||||
from artiq.transforms.tools import embeddable_func_names
|
|
||||||
|
|
||||||
|
|
||||||
def _add_units(f, unit_list):
|
|
||||||
def wrapper(*args):
|
|
||||||
new_args = []
|
|
||||||
for arg, unit in zip(args, unit_list):
|
|
||||||
if unit is None:
|
|
||||||
new_args.append(arg)
|
|
||||||
else:
|
|
||||||
if isinstance(arg, list):
|
|
||||||
new_args.append([units.Quantity(x, unit) for x in arg])
|
|
||||||
else:
|
|
||||||
new_args.append(units.Quantity(arg, unit))
|
|
||||||
return f(*new_args)
|
|
||||||
return wrapper
|
|
||||||
|
|
||||||
|
|
||||||
class _UnitsLowerer(ast.NodeTransformer):
|
|
||||||
def __init__(self, rpc_map):
|
|
||||||
self.rpc_map = rpc_map
|
|
||||||
# (original rpc number, (unit list)) -> new rpc number
|
|
||||||
self.rpc_remap = defaultdict(lambda: len(self.rpc_remap))
|
|
||||||
self.variable_units = dict()
|
|
||||||
|
|
||||||
def visit_Name(self, node):
|
|
||||||
try:
|
|
||||||
unit = self.variable_units[node.id]
|
|
||||||
except KeyError:
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
if unit is not None:
|
|
||||||
node.unit = unit
|
|
||||||
return node
|
|
||||||
|
|
||||||
def visit_BoolOp(self, node):
|
|
||||||
self.generic_visit(node)
|
|
||||||
us = [getattr(value, "unit", None) for value in node.values]
|
|
||||||
if not all(u == us[0] for u in us[1:]):
|
|
||||||
raise units.DimensionError
|
|
||||||
return node
|
|
||||||
|
|
||||||
def visit_Compare(self, node):
|
|
||||||
self.generic_visit(node)
|
|
||||||
u0 = getattr(node.left, "unit", None)
|
|
||||||
us = [getattr(comparator, "unit", None)
|
|
||||||
for comparator in node.comparators]
|
|
||||||
if not all(u == u0 for u in us):
|
|
||||||
raise units.DimensionError
|
|
||||||
return node
|
|
||||||
|
|
||||||
def visit_UnaryOp(self, node):
|
|
||||||
self.generic_visit(node)
|
|
||||||
if hasattr(node.operand, "unit"):
|
|
||||||
node.unit = node.operand.unit
|
|
||||||
return node
|
|
||||||
|
|
||||||
def visit_BinOp(self, node):
|
|
||||||
self.generic_visit(node)
|
|
||||||
op = type(node.op)
|
|
||||||
left_unit = getattr(node.left, "unit", None)
|
|
||||||
right_unit = getattr(node.right, "unit", None)
|
|
||||||
if op in (ast.Add, ast.Sub, ast.Mod):
|
|
||||||
unit = units.addsub_dimension(left_unit, right_unit)
|
|
||||||
elif op == ast.Mult:
|
|
||||||
unit = units.mul_dimension(left_unit, right_unit)
|
|
||||||
elif op in (ast.Div, ast.FloorDiv):
|
|
||||||
unit = units.div_dimension(left_unit, right_unit)
|
|
||||||
else:
|
|
||||||
if left_unit is not None or right_unit is not None:
|
|
||||||
raise units.DimensionError
|
|
||||||
unit = None
|
|
||||||
if unit is not None:
|
|
||||||
node.unit = unit
|
|
||||||
return node
|
|
||||||
|
|
||||||
def visit_Attribute(self, node):
|
|
||||||
self.generic_visit(node)
|
|
||||||
if node.attr == "amount" and hasattr(node.value, "unit"):
|
|
||||||
del node.value.unit
|
|
||||||
return node.value
|
|
||||||
else:
|
|
||||||
return node
|
|
||||||
|
|
||||||
def visit_List(self, node):
|
|
||||||
self.generic_visit(node)
|
|
||||||
if node.elts:
|
|
||||||
us = [getattr(elt, "unit", None) for elt in node.elts]
|
|
||||||
if not all(u == us[0] for u in us[1:]):
|
|
||||||
raise units.DimensionError
|
|
||||||
node.unit = us[0]
|
|
||||||
return node
|
|
||||||
|
|
||||||
def visit_ListComp(self, node):
|
|
||||||
self.generic_visit(node)
|
|
||||||
if hasattr(node.elt, "unit"):
|
|
||||||
node.unit = node.elt.unit
|
|
||||||
return node
|
|
||||||
|
|
||||||
def visit_Call(self, node):
|
|
||||||
self.generic_visit(node)
|
|
||||||
if node.func.id == "Quantity":
|
|
||||||
amount, unit = node.args
|
|
||||||
amount.unit = unit.s
|
|
||||||
return amount
|
|
||||||
elif node.func.id in ("now", "cycles_to_time"):
|
|
||||||
node.unit = "s"
|
|
||||||
elif node.func.id == "syscall":
|
|
||||||
# only RPCs can have units
|
|
||||||
if node.args[0].s == "rpc":
|
|
||||||
unit_list = tuple(getattr(arg, "unit", None)
|
|
||||||
for arg in node.args[2:])
|
|
||||||
rpc_n = node.args[1].n
|
|
||||||
node.args[1].n = self.rpc_remap[(rpc_n, (unit_list))]
|
|
||||||
else:
|
|
||||||
if any(hasattr(arg, "unit") for arg in node.args):
|
|
||||||
raise units.DimensionError
|
|
||||||
elif node.func.id in ("delay", "at", "time_to_cycles", "watchdog"):
|
|
||||||
if getattr(node.args[0], "unit", None) != "s":
|
|
||||||
raise units.DimensionError
|
|
||||||
elif node.func.id == "check_unit":
|
|
||||||
self.generic_visit(node)
|
|
||||||
elif node.func.id in embeddable_func_names:
|
|
||||||
# must be last (some embeddable funcs may have units)
|
|
||||||
if any(hasattr(arg, "unit") for arg in node.args):
|
|
||||||
raise units.DimensionError
|
|
||||||
return node
|
|
||||||
|
|
||||||
def visit_Expr(self, node):
|
|
||||||
self.generic_visit(node)
|
|
||||||
if (isinstance(node.value, ast.Call)
|
|
||||||
and node.value.func.id == "check_unit"):
|
|
||||||
call = node.value
|
|
||||||
if (isinstance(call.args[1], ast.NameConstant)
|
|
||||||
and call.args[1].value is None):
|
|
||||||
if hasattr(call.value.args[0], "unit"):
|
|
                    raise units.DimensionError
            elif isinstance(call.args[1], ast.Str):
                if getattr(call.args[0], "unit", None) != call.args[1].s:
                    raise units.DimensionError
            else:
                raise NotImplementedError
            return None
        else:
            return node

    def _update_target(self, target, unit):
        if isinstance(target, ast.Name):
            if target.id in self.variable_units:
                if self.variable_units[target.id] != unit:
                    raise TypeError(
                        "Inconsistent units for variable '{}': '{}' and '{}'"
                        .format(target.id,
                                self.variable_units[target.id],
                                unit))
            else:
                self.variable_units[target.id] = unit

    def visit_Assign(self, node):
        node.value = self.visit(node.value)
        unit = getattr(node.value, "unit", None)
        for target in node.targets:
            self._update_target(target, unit)
        return node

    def visit_AugAssign(self, node):
        value = self.visit_BinOp(ast.BinOp(
            op=node.op, left=node.target, right=node.value))
        unit = getattr(value, "unit", None)
        self._update_target(node.target, unit)
        return node

    # Only dimensionless iterators are supported
    def visit_For(self, node):
        self.generic_visit(node)
        self._update_target(node.target, None)
        return node


def lower_units(func_def, rpc_map):
    ul = _UnitsLowerer(rpc_map)
    ul.visit(func_def)
    original_map = copy(rpc_map)
    for (original_rpcn, unit_list), new_rpcn in ul.rpc_remap.items():
        rpc_map[new_rpcn] = _add_units(original_map[original_rpcn], unit_list)
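The bookkeeping above, tagging AST nodes with a unit attribute and combining the tags in visit_BinOp, can be reproduced in a small self-contained sketch. Everything below (the class name, the toy unit rules and the known_units mapping) is illustrative and is not the ARTIQ units module:

import ast

class ToyUnitsLowerer(ast.NodeTransformer):
    """Propagates a .unit tag through an expression, mimicking the pattern above."""
    def __init__(self, known_units):
        self.known_units = known_units      # e.g. {"t": "s"}

    def visit_Name(self, node):
        unit = self.known_units.get(node.id)
        if unit is not None:
            node.unit = unit
        return node

    def visit_BinOp(self, node):
        self.generic_visit(node)
        left = getattr(node.left, "unit", None)
        right = getattr(node.right, "unit", None)
        if isinstance(node.op, (ast.Add, ast.Sub)):
            if left != right:
                raise TypeError("unit mismatch in +/-")
            unit = left
        elif isinstance(node.op, ast.Mult):
            unit = left or right            # toy rule: dimensionless * unit keeps the unit
        else:
            unit = None
        if unit is not None:
            node.unit = unit
        return node

tree = ast.parse("2 * t + t", mode="eval")
ToyUnitsLowerer({"t": "s"}).visit(tree)
print(getattr(tree.body, "unit", None))     # -> s

Running the sketch prints "s": the whole expression is inferred to carry seconds, which is the same inference the lowerer performs before the unit wrappers are stripped away.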
@@ -1,12 +1,12 @@
 """
-This transform turns calls to delay/now/at that use non-integer time
-expressed in seconds into calls that use int64 time expressed in multiples of
-ref_period.
+This transform turns calls to delay() that use non-integer time
+expressed in seconds into calls to delay_mu() that use int64 time
+expressed in multiples of ref_period.

 It does so by inserting multiplication/division/rounding operations around
 those calls.

-The time_to_cycles and cycles_to_time core language functions are also
+The seconds_to_mu and mu_to_seconds core language functions are also
 implemented here, as well as watchdog to syscall conversion.
 """

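A quick numeric illustration of the docstring above; the 1 ns ref_period and the 1.2 us argument are made-up values, and plain round() stands in for the kernel's int64 rounding:

ref_period = 1e-9                      # assumed machine time unit: 1 ns
seconds = 1.2e-6                       # value originally passed to delay()
print(round(seconds / ref_period))     # -> 1200, the integer that delay_mu() receives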
@@ -15,14 +15,7 @@ import ast
 from artiq.transforms.tools import value_to_ast


-def _call_now(node):
-    return ast.copy_location(
-        ast.Call(func=ast.Name("now", ast.Load()),
-                 args=[], keywords=[], starargs=[], kwargs=[]),
-        node)
-
-
-def _time_to_cycles(ref_period, node):
+def _seconds_to_mu(ref_period, node):
     divided = ast.copy_location(
         ast.BinOp(left=node,
                   op=ast.Div(),
@@ -35,7 +28,7 @@ def _time_to_cycles(ref_period, node):
         divided)


-def _cycles_to_time(ref_period, node):
+def _mu_to_seconds(ref_period, node):
     return ast.copy_location(
         ast.BinOp(left=node,
                   op=ast.Mult(),
@@ -50,29 +43,22 @@ class _TimeQuantizer(ast.NodeTransformer):

     def visit_Call(self, node):
         funcname = node.func.id
-        if funcname == "now":
-            return _cycles_to_time(self.ref_period, _call_now(node))
-        elif funcname == "delay" or funcname == "at":
+        if funcname == "delay":
+            node.func.id = "delay_mu"
             if (isinstance(node.args[0], ast.Call)
-                    and node.args[0].func.id == "cycles_to_time"):
+                    and node.args[0].func.id == "mu_to_seconds"):
                 # optimize:
-                # delay/at(cycles_to_time(x)) -> delay/at(x)
+                # delay(mu_to_seconds(x)) -> delay_mu(x)
                 node.args[0] = self.visit(node.args[0].args[0])
             else:
-                node.args[0] = _time_to_cycles(self.ref_period,
+                node.args[0] = _seconds_to_mu(self.ref_period,
                                               self.visit(node.args[0]))
             return node
-        elif funcname == "time_to_cycles":
-            if (isinstance(node.args[0], ast.Call)
-                    and node.args[0].func.id == "now"):
-                # optimize:
-                # time_to_cycles(now()) -> now()
-                return _call_now(node)
-            else:
-                return _time_to_cycles(self.ref_period,
+        elif funcname == "seconds_to_mu":
+            return _seconds_to_mu(self.ref_period,
                                   self.visit(node.args[0]))
-        elif funcname == "cycles_to_time":
-            return _cycles_to_time(self.ref_period,
+        elif funcname == "mu_to_seconds":
+            return _mu_to_seconds(self.ref_period,
                                   self.visit(node.args[0]))
         else:
             self.generic_visit(node)
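The rewriting done in visit_Call above can be exercised with a standalone sketch. This is an illustration, not the ARTIQ transformer: plain round() stands in for round64(), ast.unparse needs Python 3.9+, and the class and variable names are made up:

import ast

class ToyTimeQuantizer(ast.NodeTransformer):
    """Rewrites delay(<seconds expr>) into delay_mu(round(<expr> / ref_period))."""
    def __init__(self, ref_period):
        self.ref_period = ref_period

    def visit_Call(self, node):
        self.generic_visit(node)
        if isinstance(node.func, ast.Name) and node.func.id == "delay":
            node.func.id = "delay_mu"
            arg = node.args[0]
            if (isinstance(arg, ast.Call) and isinstance(arg.func, ast.Name)
                    and arg.func.id == "mu_to_seconds"):
                # optimize: delay(mu_to_seconds(x)) -> delay_mu(x)
                node.args[0] = arg.args[0]
            else:
                ref = ast.parse(repr(self.ref_period), mode="eval").body
                divided = ast.BinOp(left=arg, op=ast.Div(), right=ref)
                node.args[0] = ast.Call(func=ast.Name("round", ast.Load()),
                                        args=[divided], keywords=[])
        return node

tree = ast.parse("delay(1.2e-06)\ndelay(mu_to_seconds(x))")
tree = ast.fix_missing_locations(ToyTimeQuantizer(1e-9).visit(tree))
print(ast.unparse(tree))
# -> delay_mu(round(1.2e-06 / 1e-09))
#    delay_mu(x)

The second call shows the cancellation that the comment in the diff describes: an explicit mu_to_seconds() round-trip is folded away instead of being divided back down.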
@@ -123,6 +109,5 @@ class _TimeQuantizer(ast.NodeTransformer):
         return node


-
 def quantize_time(func_def, ref_period):
     _TimeQuantizer(ref_period).visit(func_def)
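Hypothetical driver code for the entry point above; the module path artiq.transforms.quantize_time and the 1 ns ref_period are assumptions made only for this illustration:

import ast
from artiq.transforms.quantize_time import quantize_time   # module path assumed

func_def = ast.parse("def k():\n    delay(1.5e-06)\n").body[0]
quantize_time(func_def, 1e-9)    # rewrites delay(...) into delay_mu(...) in place
print(ast.dump(func_def))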
@@ -6,12 +6,13 @@ from artiq.language import units


 embeddable_funcs = (
-    core_language.delay, core_language.at, core_language.now,
-    core_language.time_to_cycles, core_language.cycles_to_time,
+    core_language.delay_mu, core_language.at_mu, core_language.now_mu,
+    core_language.delay,
+    core_language.seconds_to_mu, core_language.mu_to_seconds,
     core_language.syscall, core_language.watchdog,
     range, bool, int, float, round, len,
     core_language.int64, core_language.round64,
-    Fraction, units.Quantity, units.check_unit, core_language.EncodedException
+    Fraction, core_language.EncodedException
 )
 embeddable_func_names = {func.__name__ for func in embeddable_funcs}

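The embeddable_func_names line kept above derives the name set that the transforms consult; a trimmed, self-contained demo of the same idiom (the tuple below is only a stand-in for the real one):

embeddable_funcs_demo = (range, bool, int, float, round, len)
embeddable_func_names_demo = {func.__name__ for func in embeddable_funcs_demo}
print("round" in embeddable_func_names_demo)       # -> True
print("delay_mu" in embeddable_func_names_demo)    # -> False (not in this trimmed demo set)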
@@ -61,11 +62,6 @@ def value_to_ast(value):
     for kg in core_language.kernel_globals:
         if value is getattr(core_language, kg):
             return ast.Name(kg, ast.Load())
-    if isinstance(value, units.Quantity):
-        return ast.Call(
-            func=ast.Name("Quantity", ast.Load()),
-            args=[value_to_ast(value.amount), ast.Str(value.unit)],
-            keywords=[], starargs=None, kwargs=None)
     raise NotASTRepresentable(str(value))

@@ -88,14 +84,6 @@ def eval_constant(node):
             numerator = eval_constant(node.args[0])
             denominator = eval_constant(node.args[1])
             return Fraction(numerator, denominator)
-        elif funcname == "Quantity":
-            amount, unit = node.args
-            amount = eval_constant(amount)
-            try:
-                unit = getattr(units, unit.id)
-            except:
-                raise NotConstant
-            return units.Quantity(amount, unit)
         else:
             raise NotConstant
     else:
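The Fraction branch kept above is ordinary constant folding of a call node; a minimal standalone sketch of the same idea (the helper name and the ValueError are invented for the example):

import ast
from fractions import Fraction

def fold_fraction(call):
    """Folds a Fraction(<literal>, <literal>) call node into a Fraction value."""
    if (isinstance(call, ast.Call) and isinstance(call.func, ast.Name)
            and call.func.id == "Fraction"
            and all(isinstance(a, ast.Constant) for a in call.args)):
        return Fraction(*(a.value for a in call.args))
    raise ValueError("not a constant Fraction() call")

print(fold_fraction(ast.parse("Fraction(3, 8)", mode="eval").body))   # -> 3/8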
@@ -105,8 +93,7 @@ def eval_constant(node):
 _replaceable_funcs = {
     "bool", "int", "float", "round",
     "int64", "round64", "Fraction",
-    "time_to_cycles", "cycles_to_time",
-    "Quantity"
+    "seconds_to_mu", "mu_to_seconds"
 }

Some files were not shown because too many files have changed in this diff.