diff --git a/.gitignore b/.gitignore
index 07f493566..22eff109e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -13,6 +13,9 @@ doc/manual/_build
/*.egg-info
/.coverage
artiq/test/results
+artiq/test/h5types.h5
examples/master/results
+examples/master/dataset_db.pyon
+examples/sim/dataset_db.pyon
Output/
/lit-test/libartiq_support/libartiq_support.so
diff --git a/.travis.yml b/.travis.yml
index 1bbda0df9..1acde04de 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -9,29 +9,40 @@ env:
global:
- secure: "DUk/Ihg8KbbzEgPF0qrHqlxU8e8eET9i/BtzNvFddIGX4HP/P2qz0nk3cVkmjuWhqJXSbC22RdKME9qqPzw6fJwJ6dpJ3OR6dDmSd7rewavq+niwxu52PVa+yK8mL4yf1terM7QQ5tIRf+yUL9qGKrZ2xyvEuRit6d4cFep43Ws="
matrix:
- - BUILD_SOC=0
- - BUILD_SOC=1
-before_install:
- - mkdir -p $HOME/.mlabs
- - if [ $TRAVIS_PULL_REQUEST != false ]; then BUILD_SOC=0; fi
- - if [ $BUILD_SOC -ne 0 ]; then ./.travis/get-xilinx.sh; fi
- - . ./.travis/get-toolchain.sh
- - . ./.travis/get-anaconda.sh
- - echo "BUILD_SOC=$BUILD_SOC" >> $HOME/.mlabs/build_settings.sh
- - source $HOME/miniconda/bin/activate py35
- - conda install -q pip coverage anaconda-client migen cython
- - pip install coveralls
+ - BUILD_SOC=none
+ - BUILD_SOC=pipistrello-nist_qc1
+ - BUILD_SOC=kc705-nist_qc1
+ - BUILD_SOC=kc705-nist_qc2
install:
- - conda build --python 3.5 conda/artiq
- - conda install -q artiq --use-local
+ - mkdir -p $HOME/.m-labs
+ - if [ $TRAVIS_PULL_REQUEST != false ]; then BUILD_SOC=none; fi
+ - if [ $BUILD_SOC != none ]; then ./.travis/get-xilinx.sh; fi
+ - if [ $BUILD_SOC != none ]; then ./.travis/get-toolchain.sh; fi
+ - if [ $BUILD_SOC != none ]; then ./.travis/get-misoc.sh; fi
+ - . ./.travis/get-anaconda.sh
+ - source $HOME/miniconda/bin/activate py35
+ - conda install -q pip coverage anaconda-client cython
+ - pip install coveralls
+ # workaround for https://github.com/conda/conda-build/issues/466
+ - mkdir -p /home/travis/miniconda/conda-bld/linux-64
+ - conda index /home/travis/miniconda/conda-bld/linux-64
script:
- - coverage run --source=artiq setup.py test
- - make -C doc/manual html
+ - conda build --python 3.5 conda/artiq
+ - conda install -q --use-local artiq
+ - |
+ if [ $BUILD_SOC == none ]; then
+ PACKAGES="$(conda build --output --python 3.5 conda/artiq) $PACKAGES"
+ coverage run --source=artiq setup.py test
+ make -C doc/manual html
+ else
+ PACKAGES="$(conda build --output --python 3.5 conda/artiq-$BUILD_SOC) $PACKAGES"
+ conda build --python 3.5 conda/artiq-$BUILD_SOC
+ fi
after_success:
- |
- if [ "$TRAVIS_BRANCH" == "master" -a $BUILD_SOC -eq 1 ]; then
+ if [ "$TRAVIS_BRANCH" == "master" -a "$PACKAGES" != "" ]; then
anaconda -q login --hostname $(hostname) --username $binstar_login --password $binstar_password
- anaconda -q upload --user $binstar_login --channel dev --force $HOME/miniconda/conda-bld/linux-64/artiq-*.tar.bz2
+ anaconda -q upload --user $binstar_login --channel dev $PACKAGES
anaconda -q logout
fi
- coveralls
diff --git a/.travis/get-anaconda.sh b/.travis/get-anaconda.sh
index 790a723ad..69f107dce 100755
--- a/.travis/get-anaconda.sh
+++ b/.travis/get-anaconda.sh
@@ -10,4 +10,5 @@ conda update -q conda
conda info -a
conda install conda-build jinja2
conda create -q -n py35 python=$TRAVIS_PYTHON_VERSION
+conda config --add channels https://conda.anaconda.org/m-labs/channel/main
conda config --add channels https://conda.anaconda.org/m-labs/channel/dev
diff --git a/.travis/get-misoc.sh b/.travis/get-misoc.sh
new file mode 100755
index 000000000..355e8ffa5
--- /dev/null
+++ b/.travis/get-misoc.sh
@@ -0,0 +1,4 @@
+#!/bin/sh
+
+git clone --recursive https://github.com/m-labs/misoc $HOME/misoc
+echo "export MSCDIR=$HOME/misoc" >> $HOME/.m-labs/build_settings.sh
diff --git a/.travis/get-toolchain.sh b/.travis/get-toolchain.sh
index 73c268d0a..6dd5f94ff 100755
--- a/.travis/get-toolchain.sh
+++ b/.travis/get-toolchain.sh
@@ -1,7 +1,6 @@
#!/bin/sh
packages="http://us.archive.ubuntu.com/ubuntu/pool/universe/i/iverilog/iverilog_0.9.7-1_amd64.deb"
-archives="http://fehu.whitequark.org/files/llvm-or1k.tbz2"
mkdir -p packages
@@ -12,18 +11,5 @@ do
dpkg -x $pkg_name packages
done
-for a in $archives
-do
- wget $a
- (cd packages && tar xf ../$(basename $a))
-done
-
-export PATH=$PWD/packages/usr/local/llvm-or1k/bin:$PWD/packages/usr/local/bin:$PWD/packages/usr/bin:$PATH
-export LD_LIBRARY_PATH=$PWD/packages/usr/lib/x86_64-linux-gnu:$PWD/packages/usr/local/x86_64-unknown-linux-gnu/or1k-elf/lib:$LD_LIBRARY_PATH
-
-echo "export LD_LIBRARY_PATH=$PWD/packages/usr/lib/x86_64-linux-gnu:$PWD/packages/usr/local/x86_64-unknown-linux-gnu/or1k-elf/lib:\$LD_LIBRARY_PATH" >> $HOME/.mlabs/build_settings.sh
-echo "export PATH=$PWD/packages/usr/local/llvm-or1k/bin:$PWD/packages/usr/local/bin:$PWD/packages/usr/bin:\$PATH" >> $HOME/.mlabs/build_settings.sh
-
-or1k-linux-as --version
-llc --version
-clang --version
+echo "export LD_LIBRARY_PATH=$PWD/packages/usr/lib/x86_64-linux-gnu" >> $HOME/.m-labs/build_settings.sh
+echo "export PATH=$PWD/packages/usr/bin:\$PATH" >> $HOME/.m-labs/build_settings.sh
diff --git a/.travis/get-xilinx.sh b/.travis/get-xilinx.sh
index 0f37893b3..3d49fd693 100755
--- a/.travis/get-xilinx.sh
+++ b/.travis/get-xilinx.sh
@@ -30,7 +30,7 @@ git clone https://github.com/fallen/impersonate_macaddress
make -C impersonate_macaddress
# Tell mibuild where Xilinx toolchains are installed
# and feed it the mac address corresponding to the license
-cat >> $HOME/.mlabs/build_settings.sh << EOF
+cat >> $HOME/.m-labs/build_settings.sh << EOF
MISOC_EXTRA_VIVADO_CMDLINE="-Ob vivado_path $HOME/Xilinx/Vivado"
MISOC_EXTRA_ISE_CMDLINE="-Ob ise_path $HOME/opt/Xilinx/"
export MACADDR=$macaddress
diff --git a/artiq/devices/pdq2/driver.py b/artiq/devices/pdq2/driver.py
index a8637cc5b..d51a14992 100644
--- a/artiq/devices/pdq2/driver.py
+++ b/artiq/devices/pdq2/driver.py
@@ -139,6 +139,12 @@ class Pdq2:
self.num_channels = self.num_dacs * self.num_boards
self.channels = [Channel() for i in range(self.num_channels)]
+ def get_num_boards(self):
+ return self.num_boards
+
+ def get_num_channels(self):
+ return self.num_channels
+
def close(self):
self.dev.close()
del self.dev
diff --git a/artiq/frontend/artiq_client.py b/artiq/frontend/artiq_client.py
index 02cf67775..bcf6e2851 100755
--- a/artiq/frontend/artiq_client.py
+++ b/artiq/frontend/artiq_client.py
@@ -1,6 +1,7 @@
#!/usr/bin/env python3
import argparse
+import logging
import time
import asyncio
import sys
@@ -51,6 +52,10 @@ def get_argparser():
"(defaults to head, ignored without -R)")
parser_add.add_argument("-c", "--class-name", default=None,
help="name of the class to run")
+ parser_add.add_argument("-v", "--verbose", default=0, action="count",
+ help="increase logging level of the experiment")
+ parser_add.add_argument("-q", "--quiet", default=0, action="count",
+ help="decrease logging level of the experiment")
parser_add.add_argument("file",
help="file containing the experiment to run")
parser_add.add_argument("arguments", nargs="*",
@@ -110,6 +115,7 @@ def _action_submit(remote, args):
sys.exit(1)
expid = {
+ "log_level": logging.WARNING + args.quiet*10 - args.verbose*10,
"file": args.file,
"class_name": args.class_name,
"arguments": arguments,
diff --git a/artiq/frontend/artiq_ctlmgr.py b/artiq/frontend/artiq_ctlmgr.py
index fb25519b1..ad9071eda 100755
--- a/artiq/frontend/artiq_ctlmgr.py
+++ b/artiq/frontend/artiq_ctlmgr.py
@@ -5,39 +5,22 @@ import atexit
import argparse
import os
import logging
+import subprocess
import shlex
import socket
+import platform
from artiq.protocols.sync_struct import Subscriber
from artiq.protocols.pc_rpc import AsyncioClient, Server
-from artiq.tools import verbosity_args, init_logger
+from artiq.protocols.logging import (LogForwarder,
+ parse_log_message, log_with_name,
+ SourceFilter)
from artiq.tools import TaskObject, Condition
logger = logging.getLogger(__name__)
-def get_argparser():
- parser = argparse.ArgumentParser(description="ARTIQ controller manager")
- verbosity_args(parser)
- parser.add_argument(
- "-s", "--server", default="::1",
- help="hostname or IP of the master to connect to")
- parser.add_argument(
- "--port", default=3250, type=int,
- help="TCP port to use to connect to the master")
- parser.add_argument(
- "--retry-master", default=5.0, type=float,
- help="retry timer for reconnecting to master")
- parser.add_argument(
- "--bind", default="::1",
- help="hostname or IP address to bind to")
- parser.add_argument(
- "--bind-port", default=3249, type=int,
- help="TCP port to listen to for control (default: %(default)d)")
- return parser
-
-
class Controller:
def __init__(self, name, ddb_entry):
self.name = name
@@ -96,6 +79,23 @@ class Controller:
else:
break
+ async def forward_logs(self, stream):
+ source = "controller({})".format(self.name)
+ while True:
+ try:
+ entry = (await stream.readline())
+ if not entry:
+ break
+ entry = entry[:-1]
+ level, name, message = parse_log_message(entry.decode())
+ log_with_name(name, level, message, extra={"source": source})
+ except:
+ logger.debug("exception in log forwarding", exc_info=True)
+ break
+ logger.debug("stopped log forwarding of stream %s of %s",
+ stream, self.name)
+
+
async def launcher(self):
try:
while True:
@@ -103,7 +103,12 @@ class Controller:
self.name, self.command)
try:
self.process = await asyncio.create_subprocess_exec(
- *shlex.split(self.command))
+ *shlex.split(self.command),
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ asyncio.ensure_future(self.forward_logs(
+ self.process.stdout))
+ asyncio.ensure_future(self.forward_logs(
+ self.process.stderr))
await self._wait_and_ping()
except FileNotFoundError:
logger.warning("Controller %s failed to start", self.name)
@@ -129,14 +134,20 @@ class Controller:
except:
logger.warning("Controller %s did not respond to terminate "
"command, killing", self.name)
- self.process.kill()
+ try:
+ self.process.kill()
+ except ProcessLookupError:
+ pass
try:
await asyncio.wait_for(self.process.wait(),
self.term_timeout)
except:
logger.warning("Controller %s failed to exit, killing",
self.name)
- self.process.kill()
+ try:
+ self.process.kill()
+ except ProcessLookupError:
+ pass
await self.process.wait()
logger.debug("Controller %s terminated", self.name)
@@ -252,9 +263,48 @@ class ControllerManager(TaskObject):
self.controller_db.current_controllers.active[k].retry_now.notify()
+def get_argparser():
+ parser = argparse.ArgumentParser(description="ARTIQ controller manager")
+
+ group = parser.add_argument_group("verbosity")
+ group.add_argument("-v", "--verbose", default=0, action="count",
+ help="increase logging level of the manager process")
+ group.add_argument("-q", "--quiet", default=0, action="count",
+ help="decrease logging level of the manager process")
+
+ parser.add_argument(
+ "-s", "--server", default="::1",
+ help="hostname or IP of the master to connect to")
+ parser.add_argument(
+ "--port-notify", default=3250, type=int,
+ help="TCP port to connect to for notifications")
+ parser.add_argument(
+ "--port-logging", default=1066, type=int,
+ help="TCP port to connect to for logging")
+ parser.add_argument(
+ "--retry-master", default=5.0, type=float,
+ help="retry timer for reconnecting to master")
+ parser.add_argument(
+ "--bind", default="::1",
+ help="hostname or IP address to bind to")
+ parser.add_argument(
+ "--bind-port", default=3249, type=int,
+ help="TCP port to listen to for control (default: %(default)d)")
+ return parser
+
+
def main():
args = get_argparser().parse_args()
- init_logger(args)
+
+ root_logger = logging.getLogger()
+ root_logger.setLevel(logging.NOTSET)
+ source_adder = SourceFilter(logging.WARNING + args.quiet*10 - args.verbose*10,
+ "ctlmgr({})".format(platform.node()))
+ console_handler = logging.StreamHandler()
+ console_handler.setFormatter(logging.Formatter(
+ "%(levelname)s:%(source)s:%(name)s:%(message)s"))
+ console_handler.addFilter(source_adder)
+ root_logger.addHandler(console_handler)
if os.name == "nt":
loop = asyncio.ProactorEventLoop()
@@ -263,7 +313,15 @@ def main():
loop = asyncio.get_event_loop()
atexit.register(lambda: loop.close())
- ctlmgr = ControllerManager(args.server, args.port, args.retry_master)
+ logfwd = LogForwarder(args.server, args.port_logging,
+ args.retry_master)
+ logfwd.addFilter(source_adder)
+ root_logger.addHandler(logfwd)
+ logfwd.start()
+ atexit.register(lambda: loop.run_until_complete(logfwd.stop()))
+
+ ctlmgr = ControllerManager(args.server, args.port_notify,
+ args.retry_master)
ctlmgr.start()
atexit.register(lambda: loop.run_until_complete(ctlmgr.stop()))
diff --git a/artiq/frontend/artiq_flash.sh b/artiq/frontend/artiq_flash.sh
index 881e5a616..155fcb422 100755
--- a/artiq/frontend/artiq_flash.sh
+++ b/artiq/frontend/artiq_flash.sh
@@ -1,5 +1,16 @@
-#!/bin/bash
+#!/usr/bin/env python
+# conda-build requires all scripts to have a python shebang.
+# see https://github.com/conda/conda-build/blob/6921f067a/conda_build/noarch_python.py#L36-L38
+def run(script):
+ import sys, tempfile, subprocess
+ file = tempfile.NamedTemporaryFile(mode='w+t', suffix='sh')
+ file.write(script)
+ file.flush()
+ subprocess.run(["/bin/bash", file.name] + sys.argv[1:])
+ file.close()
+
+run("""
# exit on error
set -e
# print commands
@@ -72,7 +83,7 @@ do
echo ""
echo "To flash everything, do not use any of the -b|-B|-r option."
echo ""
- echo "usage: $0 [-b] [-B] [-r] [-h] [-m nist_qc1|nist_qc2] [-t kc705|pipistrello] [-d path] [-f path]"
+ echo "usage: artiq_flash.sh [-b] [-B] [-r] [-h] [-m nist_qc1|nist_qc2] [-t kc705|pipistrello] [-d path] [-f path]"
echo "-b Flash bitstream"
echo "-B Flash BIOS"
echo "-r Flash ARTIQ runtime"
@@ -193,3 +204,4 @@ then
fi
echo "Done."
xc3sprog -v -c $CABLE -R > /dev/null 2>&1
+""")
diff --git a/artiq/frontend/artiq_gui.py b/artiq/frontend/artiq_gui.py
index 10db8eb41..4a4b3369c 100755
--- a/artiq/frontend/artiq_gui.py
+++ b/artiq/frontend/artiq_gui.py
@@ -116,6 +116,7 @@ def main():
atexit.register(lambda: loop.run_until_complete(d_schedule.sub_close()))
d_log = LogDock()
+ smgr.register(d_log)
loop.run_until_complete(d_log.sub_connect(
args.server, args.port_notify))
atexit.register(lambda: loop.run_until_complete(d_log.sub_close()))
diff --git a/artiq/frontend/artiq_master.py b/artiq/frontend/artiq_master.py
index dc2fb0ac5..186d14e68 100755
--- a/artiq/frontend/artiq_master.py
+++ b/artiq/frontend/artiq_master.py
@@ -5,9 +5,10 @@ import argparse
import atexit
import os
-from artiq.protocols.pc_rpc import Server
+from artiq.protocols.pc_rpc import Server as RPCServer
from artiq.protocols.sync_struct import Publisher
-from artiq.master.log import log_args, init_log
+from artiq.protocols.logging import Server as LoggingServer
+from artiq.master.log import log_args, init_log, log_worker
from artiq.master.databases import DeviceDB, DatasetDB
from artiq.master.scheduler import Scheduler
from artiq.master.worker_db import get_last_rid
@@ -27,6 +28,9 @@ def get_argparser():
group.add_argument(
"--port-control", default=3251, type=int,
help="TCP port to listen to for control (default: %(default)d)")
+ group.add_argument(
+ "--port-logging", default=1066, type=int,
+ help="TCP port to listen to for remote logging (default: %(default)d)")
group = parser.add_argument_group("databases")
group.add_argument("--device-db", default="device_db.pyon",
@@ -49,7 +53,7 @@ def get_argparser():
def main():
args = get_argparser().parse_args()
- log_buffer, log_forwarder = init_log(args)
+ log_buffer = init_log(args)
if os.name == "nt":
loop = asyncio.ProactorEventLoop()
asyncio.set_event_loop(loop)
@@ -67,7 +71,7 @@ def main():
else:
repo_backend = FilesystemBackend(args.repository)
repository = Repository(repo_backend, device_db.get_device_db,
- log_forwarder.log_worker)
+ log_worker)
atexit.register(repository.close)
repository.scan_async()
@@ -76,14 +80,14 @@ def main():
"get_device": device_db.get,
"get_dataset": dataset_db.get,
"update_dataset": dataset_db.update,
- "log": log_forwarder.log_worker
+ "log": log_worker
}
scheduler = Scheduler(get_last_rid() + 1, worker_handlers, repo_backend)
worker_handlers["scheduler_submit"] = scheduler.submit
scheduler.start()
atexit.register(lambda: loop.run_until_complete(scheduler.stop()))
- server_control = Server({
+ server_control = RPCServer({
"master_device_db": device_db,
"master_dataset_db": dataset_db,
"master_schedule": scheduler,
@@ -104,6 +108,11 @@ def main():
args.bind, args.port_notify))
atexit.register(lambda: loop.run_until_complete(server_notify.stop()))
+ server_logging = LoggingServer()
+ loop.run_until_complete(server_logging.start(
+ args.bind, args.port_logging))
+ atexit.register(lambda: loop.run_until_complete(server_logging.stop()))
+
loop.run_forever()
if __name__ == "__main__":
diff --git a/artiq/frontend/artiq_rpctool.py b/artiq/frontend/artiq_rpctool.py
index acfa886d1..e097fb6e8 100755
--- a/artiq/frontend/artiq_rpctool.py
+++ b/artiq/frontend/artiq_rpctool.py
@@ -6,7 +6,7 @@ import sys
import numpy as np # Needed to use numpy in RPC call arguments on cmd line
import pprint
-from artiq.protocols.pc_rpc import Client
+from artiq.protocols.pc_rpc import AutoTarget, Client
def get_argparser():
@@ -85,19 +85,9 @@ def main():
args = get_argparser().parse_args()
remote = Client(args.server, args.port, None)
-
targets, description = remote.get_rpc_id()
-
if args.action != "list-targets":
- # If no target specified and remote has only one, then use this one.
- # Exit otherwise.
- if len(targets) > 1 and args.target is None:
- print("Remote server has several targets, please supply one with "
- "-t")
- sys.exit(1)
- elif args.target is None:
- args.target = targets[0]
- remote.select_rpc_target(args.target)
+ remote.select_rpc_target(AutoTarget)
if args.action == "list-targets":
list_targets(targets, description)
diff --git a/artiq/frontend/pdq2_client.py b/artiq/frontend/pdq2_client.py
index 2f3c3b787..d8c7955ed 100755
--- a/artiq/frontend/pdq2_client.py
+++ b/artiq/frontend/pdq2_client.py
@@ -55,7 +55,6 @@ def main():
args = get_argparser().parse_args()
init_logger(args)
dev = Client(args.server, args.port, "pdq2")
- dev.init()
if args.reset:
dev.write(b"\x00\x00") # flush any escape
@@ -66,8 +65,6 @@ def main():
dev.cmd("DCM", args.dcm)
freq = 100e6 if args.dcm else 50e6
dev.set_freq(freq)
- num_channels = dev.get_num_channels()
- num_frames = dev.get_num_frames()
times = eval(args.times, globals(), {})
voltages = eval(args.voltages, globals(), dict(t=times))
diff --git a/artiq/frontend/thorlabs_tcube_controller.py b/artiq/frontend/thorlabs_tcube_controller.py
index 85ccc6ebc..c9b209070 100755
--- a/artiq/frontend/thorlabs_tcube_controller.py
+++ b/artiq/frontend/thorlabs_tcube_controller.py
@@ -12,7 +12,7 @@ def get_argparser():
parser = argparse.ArgumentParser()
parser.add_argument("-P", "--product", required=True,
help="type of the Thorlabs T-Cube device to control",
- choices=["TDC001", "TPZ001"])
+ choices=["tdc001", "tpz001"])
parser.add_argument("-d", "--device", default=None,
help="serial device. See documentation for how to "
"specify a USB Serial Number.")
@@ -33,19 +33,20 @@ def main():
"argument. Use --help for more information.")
sys.exit(1)
+ product = args.product.lower()
if args.simulation:
- if args.product == "TDC001":
+ if product == "tdc001":
dev = TdcSim()
- elif args.product == "TPZ001":
+ elif product == "tpz001":
dev = TpzSim()
else:
- if args.product == "TDC001":
+ if product == "tdc001":
dev = Tdc(args.device)
- elif args.product == "TPZ001":
+ elif product == "tpz001":
dev = Tpz(args.device)
try:
- simple_server_loop({args.product.lower(): dev}, args.bind, args.port)
+ simple_server_loop({product: dev}, args.bind, args.port)
finally:
dev.close()
diff --git a/artiq/gui/datasets.py b/artiq/gui/datasets.py
index ebe8d295c..fcaa36ac1 100644
--- a/artiq/gui/datasets.py
+++ b/artiq/gui/datasets.py
@@ -12,6 +12,11 @@ from artiq.tools import short_format
from artiq.gui.tools import DictSyncModel
from artiq.gui.displays import *
+try:
+ QSortFilterProxyModel = QtCore.QSortFilterProxyModel
+except AttributeError:
+ QSortFilterProxyModel = QtGui.QSortFilterProxyModel
+
logger = logging.getLogger(__name__)
@@ -74,15 +79,7 @@ class DatasetsDock(dockarea.Dock):
self.displays = dict()
def _search_datasets(self):
- model = self.table_model
- search = self.search.displayText()
- for row in range(model.rowCount(model.index(0, 0))):
- index = model.index(row, 0)
- dataset = model.data(index, QtCore.Qt.DisplayRole)
- if search in dataset:
- self.table.showRow(row)
- else:
- self.table.hideRow(row)
+ self.table_model_filter.setFilterFixedString(self.search.displayText())
def get_dataset(self, key):
return self.table_model.backing_store[key][1]
@@ -97,12 +94,16 @@ class DatasetsDock(dockarea.Dock):
def init_datasets_model(self, init):
self.table_model = DatasetsModel(self.table, init)
- self.table.setModel(self.table_model)
+ self.table_model_filter = QSortFilterProxyModel()
+ self.table_model_filter.setSourceModel(self.table_model)
+ self.table.setModel(self.table_model_filter)
return self.table_model
def update_display_data(self, dsp):
- dsp.update_data({k: self.table_model.backing_store[k][1]
- for k in dsp.data_sources()})
+ filtered_data = {k: self.table_model.backing_store[k][1]
+ for k in dsp.data_sources()
+ if k in self.table_model.backing_store}
+ dsp.update_data(filtered_data)
def on_mod(self, mod):
if mod["action"] == "init":
@@ -110,10 +111,10 @@ class DatasetsDock(dockarea.Dock):
display.update_data(self.table_model.backing_store)
return
- if mod["action"] == "setitem":
- source = mod["key"]
- elif mod["path"]:
+ if mod["path"]:
source = mod["path"][0]
+ elif mod["action"] == "setitem":
+ source = mod["key"]
else:
return
diff --git a/artiq/gui/displays.py b/artiq/gui/displays.py
index a08aed041..5fb8069fc 100644
--- a/artiq/gui/displays.py
+++ b/artiq/gui/displays.py
@@ -137,7 +137,7 @@ class XYDisplay(dockarea.Dock):
error = data.get(result_error, None)
fit = data.get(result_fit, None)
- if not y or len(y) != len(x):
+ if not len(y) or len(y) != len(x):
return
if error is not None and hasattr(error, "__len__"):
if not len(error):
@@ -201,7 +201,7 @@ class HistogramDisplay(dockarea.Dock):
if x is None:
x = list(range(len(y)+1))
- if y and len(x) == len(y) + 1:
+ if len(y) and len(x) == len(y) + 1:
self.plot.clear()
self.plot.plot(x, y, stepMode=True, fillLevel=0,
brush=(0, 0, 255, 150))
diff --git a/artiq/gui/explorer.py b/artiq/gui/explorer.py
index 75ea4f338..7ee3e311e 100644
--- a/artiq/gui/explorer.py
+++ b/artiq/gui/explorer.py
@@ -1,4 +1,5 @@
import asyncio
+import logging
from quamash import QtGui, QtCore
from pyqtgraph import dockarea
@@ -6,8 +7,9 @@ from pyqtgraph import LayoutWidget
from artiq.protocols.sync_struct import Subscriber
from artiq.protocols import pyon
-from artiq.gui.tools import si_prefix, DictSyncModel
+from artiq.gui.tools import DictSyncModel
from artiq.gui.scan import ScanController
+from artiq.gui.shortcuts import ShortcutManager
class _ExplistModel(DictSyncModel):
@@ -85,9 +87,8 @@ class _NumberEntry(QtGui.QDoubleSpinBox):
self.setMaximum(procdesc["max"]/self.scale)
else:
self.setMaximum(float("inf"))
- suffix = si_prefix(self.scale) + procdesc["unit"]
- if suffix:
- self.setSuffix(" " + suffix)
+ if procdesc["unit"]:
+ self.setSuffix(" " + procdesc["unit"])
if "default" in procdesc:
self.set_argument_value(procdesc["default"])
@@ -122,14 +123,14 @@ _procty_to_entry = {
class _ArgumentEditor(QtGui.QTreeWidget):
- def __init__(self, dialog_parent):
+ def __init__(self, main_window):
QtGui.QTreeWidget.__init__(self)
self.setColumnCount(2)
self.header().setResizeMode(QtGui.QHeaderView.ResizeToContents)
self.header().setVisible(False)
self.setSelectionMode(QtGui.QAbstractItemView.NoSelection)
- self.dialog_parent = dialog_parent
+ self.main_window = main_window
self._groups = dict()
self.set_arguments([])
@@ -176,7 +177,7 @@ class _ArgumentEditor(QtGui.QTreeWidget):
r[arg] = entry.get_argument_value()
except Exception as e:
if show_error_message:
- msgbox = QtGui.QMessageBox(self.dialog_parent)
+ msgbox = QtGui.QMessageBox(self.main_window)
msgbox.setWindowTitle("Error")
msgbox.setText("Failed to obtain value for argument '{}':\n{}"
.format(arg, str(e)))
@@ -215,10 +216,10 @@ class _ArgumentEditor(QtGui.QTreeWidget):
class ExplorerDock(dockarea.Dock):
- def __init__(self, dialog_parent, status_bar, schedule_ctl):
+ def __init__(self, main_window, status_bar, schedule_ctl):
dockarea.Dock.__init__(self, "Explorer", size=(1500, 500))
- self.dialog_parent = dialog_parent
+ self.main_window = main_window
self.status_bar = status_bar
self.schedule_ctl = schedule_ctl
@@ -235,44 +236,59 @@ class ExplorerDock(dockarea.Dock):
self.datetime = QtGui.QDateTimeEdit()
self.datetime.setDisplayFormat("MMM d yyyy hh:mm:ss")
- self.datetime.setCalendarPopup(True)
self.datetime.setDate(QtCore.QDate.currentDate())
self.datetime.dateTimeChanged.connect(self.enable_duedate)
self.datetime_en = QtGui.QCheckBox("Due date:")
- grid.addWidget(self.datetime_en, 1, 0)
- grid.addWidget(self.datetime, 1, 1)
-
- self.priority = QtGui.QSpinBox()
- self.priority.setRange(-99, 99)
- grid.addWidget(QtGui.QLabel("Priority:"), 1, 2)
- grid.addWidget(self.priority, 1, 3)
+ grid.addWidget(self.datetime_en, 1, 0, colspan=2)
+ grid.addWidget(self.datetime, 1, 2, colspan=2)
self.pipeline = QtGui.QLineEdit()
self.pipeline.setText("main")
- grid.addWidget(QtGui.QLabel("Pipeline:"), 2, 0)
- grid.addWidget(self.pipeline, 2, 1)
+ grid.addWidget(QtGui.QLabel("Pipeline:"), 2, 0, colspan=2)
+ grid.addWidget(self.pipeline, 2, 2, colspan=2)
+
+ self.priority = QtGui.QSpinBox()
+ self.priority.setRange(-99, 99)
+ grid.addWidget(QtGui.QLabel("Priority:"), 3, 0)
+ grid.addWidget(self.priority, 3, 1)
self.flush = QtGui.QCheckBox("Flush")
- grid.addWidget(self.flush, 2, 2, colspan=2)
+ self.flush.setToolTip("Flush the pipeline before starting the experiment")
+ grid.addWidget(self.flush, 3, 2)
+
+ self.log_level = QtGui.QComboBox()
+ self.log_level.addItems(["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"])
+ self.log_level.setCurrentIndex(1)
+ self.log_level.setToolTip("Minimum level for log entry production")
+ grid.addWidget(self.log_level, 3, 3)
submit = QtGui.QPushButton("Submit")
- grid.addWidget(submit, 3, 0, colspan=4)
+ submit.setShortcut("CTRL+RETURN")
+ submit.setToolTip("Schedule the selected experiment (CTRL+ENTER)")
+ grid.addWidget(submit, 4, 0, colspan=4)
submit.clicked.connect(self.submit_clicked)
- self.argeditor = _ArgumentEditor(self.dialog_parent)
+ self.argeditor = _ArgumentEditor(self.main_window)
self.splitter.addWidget(self.argeditor)
self.splitter.setSizes([grid.minimumSizeHint().width(), 1000])
- self.state = dict()
+ self.argeditor_states = dict()
+
+ self.shortcuts = ShortcutManager(self.main_window, self)
+
+ self.el.setContextMenuPolicy(QtCore.Qt.ActionsContextMenu)
+ edit_shortcuts_action = QtGui.QAction("Edit shortcuts", self.el)
+ edit_shortcuts_action.triggered.connect(self.edit_shortcuts)
+ self.el.addAction(edit_shortcuts_action)
def update_selection(self, selected, deselected):
if deselected:
- self.state[deselected] = self.argeditor.save_state()
+ self.argeditor_states[deselected] = self.argeditor.save_state()
if selected:
expinfo = self.explist_model.backing_store[selected]
self.argeditor.set_arguments(expinfo["arguments"])
- if selected in self.state:
- self.argeditor.restore_state(self.state[selected])
+ if selected in self.argeditor_states:
+ self.argeditor.restore_state(self.argeditor_states[selected])
self.splitter.insertWidget(1, self.argeditor)
self.selected_key = selected
@@ -293,11 +309,20 @@ class ExplorerDock(dockarea.Dock):
if idx:
row = idx[0].row()
key = self.explist_model.row_to_key[row]
- self.state[key] = self.argeditor.save_state()
- return self.state
+ self.argeditor_states[key] = self.argeditor.save_state()
+ return {
+ "argeditor": self.argeditor_states,
+ "shortcuts": self.shortcuts.save_state()
+ }
def restore_state(self, state):
- self.state = state
+ try:
+ argeditor_states = state["argeditor"]
+ shortcuts_state = state["shortcuts"]
+ except KeyError:
+ return
+ self.argeditor_states = argeditor_states
+ self.shortcuts.restore_state(shortcuts_state)
def enable_duedate(self):
self.datetime_en.setChecked(True)
@@ -315,9 +340,10 @@ class ExplorerDock(dockarea.Dock):
self.el.setModel(self.explist_model)
return self.explist_model
- async def submit(self, pipeline_name, file, class_name, arguments,
- priority, due_date, flush):
+ async def submit_task(self, pipeline_name, file, class_name, arguments,
+ priority, due_date, flush):
expid = {
+ "log_level": getattr(logging, self.log_level.currentText()),
"repo_rev": None,
"file": file,
"class_name": class_name,
@@ -327,20 +353,41 @@ class ExplorerDock(dockarea.Dock):
priority, due_date, flush)
self.status_bar.showMessage("Submitted RID {}".format(rid))
+ def submit(self, pipeline, key, priority, due_date, flush):
+ # TODO: refactor explorer and cleanup.
+ # Argument editors should immediately modify the global state.
+ expinfo = self.explist_model.backing_store[key]
+ if key == self.selected_key:
+ arguments = self.argeditor.get_argument_values(True)
+ if arguments is None:
+ # There has been an error. Displaying the error message box
+ # was done by argeditor.
+ return
+ else:
+ try:
+ arguments = self.argeditor_states[key]["argument_values"]
+ except KeyError:
+ arguments = dict()
+ asyncio.ensure_future(self.submit_task(self.pipeline.text(),
+ expinfo["file"],
+ expinfo["class_name"],
+ arguments,
+ priority,
+ due_date,
+ flush))
+
def submit_clicked(self):
if self.selected_key is not None:
- expinfo = self.explist_model.backing_store[self.selected_key]
if self.datetime_en.isChecked():
due_date = self.datetime.dateTime().toMSecsSinceEpoch()/1000
else:
due_date = None
- arguments = self.argeditor.get_argument_values(True)
- if arguments is None:
- return
- asyncio.ensure_future(self.submit(self.pipeline.text(),
- expinfo["file"],
- expinfo["class_name"],
- arguments,
- self.priority.value(),
- due_date,
- self.flush.isChecked()))
+ self.submit(self.pipeline.text(),
+ self.selected_key,
+ self.priority.value(),
+ due_date,
+ self.flush.isChecked())
+
+ def edit_shortcuts(self):
+ experiments = sorted(self.explist_model.backing_store.keys())
+ self.shortcuts.edit(experiments)
diff --git a/artiq/gui/log.py b/artiq/gui/log.py
index 51e6aefac..f5b4aa15e 100644
--- a/artiq/gui/log.py
+++ b/artiq/gui/log.py
@@ -3,11 +3,16 @@ import logging
import time
from quamash import QtGui, QtCore
-from pyqtgraph import dockarea
+from pyqtgraph import dockarea, LayoutWidget
from artiq.protocols.sync_struct import Subscriber
from artiq.gui.tools import ListSyncModel
+try:
+ QSortFilterProxyModel = QtCore.QSortFilterProxyModel
+except AttributeError:
+ QSortFilterProxyModel = QtGui.QSortFilterProxyModel
+
def _level_to_name(level):
if level >= logging.CRITICAL:
@@ -20,6 +25,7 @@ def _level_to_name(level):
return "INFO"
return "DEBUG"
+
class _LogModel(ListSyncModel):
def __init__(self, parent, init):
ListSyncModel.__init__(self,
@@ -66,10 +72,39 @@ class _LogModel(ListSyncModel):
return v[3]
+class _LevelFilterProxyModel(QSortFilterProxyModel):
+ def __init__(self, min_level):
+ QSortFilterProxyModel.__init__(self)
+ self.min_level = min_level
+
+ def filterAcceptsRow(self, sourceRow, sourceParent):
+ model = self.sourceModel()
+ index = model.index(sourceRow, 0, sourceParent)
+ data = model.data(index, QtCore.Qt.DisplayRole)
+ return getattr(logging, data) >= self.min_level
+
+ def set_min_level(self, min_level):
+ self.min_level = min_level
+ self.invalidateFilter()
+
+
class LogDock(dockarea.Dock):
def __init__(self):
dockarea.Dock.__init__(self, "Log", size=(1000, 300))
+ grid = LayoutWidget()
+ self.addWidget(grid)
+
+ grid.addWidget(QtGui.QLabel("Minimum level: "), 0, 0)
+ grid.layout.setColumnStretch(0, 0)
+ grid.layout.setColumnStretch(1, 0)
+ grid.layout.setColumnStretch(2, 1)
+ self.filterbox = QtGui.QComboBox()
+ self.filterbox.addItems(["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"])
+ self.filterbox.setToolTip("Display entries at or above this level")
+ grid.addWidget(self.filterbox, 0, 1)
+ self.filterbox.currentIndexChanged.connect(self.filter_changed)
+
self.log = QtGui.QTableView()
self.log.setSelectionMode(QtGui.QAbstractItemView.NoSelection)
self.log.horizontalHeader().setResizeMode(
@@ -78,7 +113,7 @@ class LogDock(dockarea.Dock):
QtGui.QAbstractItemView.ScrollPerPixel)
self.log.setShowGrid(False)
self.log.setTextElideMode(QtCore.Qt.ElideNone)
- self.addWidget(self.log)
+ grid.addWidget(self.log, 1, 0, colspan=3)
self.scroll_at_bottom = False
async def sub_connect(self, host, port):
@@ -88,6 +123,10 @@ class LogDock(dockarea.Dock):
async def sub_close(self):
await self.subscriber.close()
+ def filter_changed(self):
+ self.table_model_filter.set_min_level(
+ getattr(logging, self.filterbox.currentText()))
+
def rows_inserted_before(self):
scrollbar = self.log.verticalScrollBar()
self.scroll_at_bottom = scrollbar.value() == scrollbar.maximum()
@@ -98,7 +137,21 @@ class LogDock(dockarea.Dock):
def init_log_model(self, init):
table_model = _LogModel(self.log, init)
- self.log.setModel(table_model)
- table_model.rowsAboutToBeInserted.connect(self.rows_inserted_before)
- table_model.rowsInserted.connect(self.rows_inserted_after)
+ self.table_model_filter = _LevelFilterProxyModel(
+ getattr(logging, self.filterbox.currentText()))
+ self.table_model_filter.setSourceModel(table_model)
+ self.log.setModel(self.table_model_filter)
+ self.table_model_filter.rowsAboutToBeInserted.connect(self.rows_inserted_before)
+ self.table_model_filter.rowsInserted.connect(self.rows_inserted_after)
return table_model
+
+ def save_state(self):
+ return {"min_level_idx": self.filterbox.currentIndex()}
+
+ def restore_state(self, state):
+ try:
+ idx = state["min_level_idx"]
+ except KeyError:
+ pass
+ else:
+ self.filterbox.setCurrentIndex(idx)
diff --git a/artiq/gui/scan.py b/artiq/gui/scan.py
index 7ed69ed46..de1d281b2 100644
--- a/artiq/gui/scan.py
+++ b/artiq/gui/scan.py
@@ -1,11 +1,9 @@
from quamash import QtGui
from pyqtgraph import LayoutWidget
-from artiq.gui.tools import si_prefix
-
class _Range(LayoutWidget):
- def __init__(self, global_min, global_max, global_step, suffix, scale, ndecimals):
+ def __init__(self, global_min, global_max, global_step, unit, scale, ndecimals):
LayoutWidget.__init__(self)
self.scale = scale
@@ -21,8 +19,8 @@ class _Range(LayoutWidget):
spinbox.setMaximum(float("inf"))
if global_step is not None:
spinbox.setSingleStep(global_step/self.scale)
- if suffix:
- spinbox.setSuffix(" " + suffix)
+ if unit:
+ spinbox.setSuffix(" " + unit)
self.addWidget(QtGui.QLabel("Min:"), 0, 0)
self.min = QtGui.QDoubleSpinBox()
@@ -68,7 +66,7 @@ class ScanController(LayoutWidget):
gmin, gmax = procdesc["global_min"], procdesc["global_max"]
gstep = procdesc["global_step"]
- suffix = si_prefix(self.scale) + procdesc["unit"]
+ unit = procdesc["unit"]
ndecimals = procdesc["ndecimals"]
self.v_noscan = QtGui.QDoubleSpinBox()
@@ -82,17 +80,17 @@ class ScanController(LayoutWidget):
else:
self.v_noscan.setMaximum(float("inf"))
self.v_noscan.setSingleStep(gstep/self.scale)
- if suffix:
- self.v_noscan.setSuffix(" " + suffix)
+ if unit:
+ self.v_noscan.setSuffix(" " + unit)
self.v_noscan_gr = LayoutWidget()
self.v_noscan_gr.addWidget(QtGui.QLabel("Value:"), 0, 0)
self.v_noscan_gr.addWidget(self.v_noscan, 0, 1)
self.stack.addWidget(self.v_noscan_gr)
- self.v_linear = _Range(gmin, gmax, gstep, suffix, self.scale, ndecimals)
+ self.v_linear = _Range(gmin, gmax, gstep, unit, self.scale, ndecimals)
self.stack.addWidget(self.v_linear)
- self.v_random = _Range(gmin, gmax, gstep, suffix, self.scale, ndecimals)
+ self.v_random = _Range(gmin, gmax, gstep, unit, self.scale, ndecimals)
self.stack.addWidget(self.v_random)
self.v_explicit = QtGui.QLineEdit()
diff --git a/artiq/gui/schedule.py b/artiq/gui/schedule.py
index 1439dfa6d..42ae01a1d 100644
--- a/artiq/gui/schedule.py
+++ b/artiq/gui/schedule.py
@@ -75,9 +75,11 @@ class ScheduleDock(dockarea.Dock):
self.table.setContextMenuPolicy(QtCore.Qt.ActionsContextMenu)
request_termination_action = QtGui.QAction("Request termination", self.table)
request_termination_action.triggered.connect(partial(self.delete_clicked, True))
+ request_termination_action.setShortcut("DELETE")
self.table.addAction(request_termination_action)
delete_action = QtGui.QAction("Delete", self.table)
delete_action.triggered.connect(partial(self.delete_clicked, False))
+ delete_action.setShortcut("SHIFT+DELETE")
self.table.addAction(delete_action)
@@ -104,5 +106,9 @@ class ScheduleDock(dockarea.Dock):
if idx:
row = idx[0].row()
rid = self.table_model.row_to_key[row]
- self.status_bar.showMessage("Deleted RID {}".format(rid))
+ if graceful:
+ msg = "Requested termination of RID {}".format(rid)
+ else:
+ msg = "Deleted RID {}".format(rid)
+ self.status_bar.showMessage(msg)
asyncio.ensure_future(self.delete(rid, graceful))
diff --git a/artiq/gui/shortcuts.py b/artiq/gui/shortcuts.py
new file mode 100644
index 000000000..82308f0be
--- /dev/null
+++ b/artiq/gui/shortcuts.py
@@ -0,0 +1,98 @@
+from functools import partial
+
+from quamash import QtGui
+try:
+ from quamash import QtWidgets
+ QShortcut = QtWidgets.QShortcut
+except:
+ QShortcut = QtGui.QShortcut
+
+
+class _ShortcutEditor(QtGui.QDialog):
+ def __init__(self, parent, experiments, shortcuts):
+ QtGui.QDialog.__init__(self, parent=parent)
+ self.setWindowTitle("Shortcuts")
+
+ self.shortcuts = shortcuts
+ self.edit_widgets = dict()
+
+ grid = QtGui.QGridLayout()
+ self.setLayout(grid)
+
+ for n, title in enumerate(["Key", "Experiment", "Priority", "Pipeline"]):
+ label = QtGui.QLabel("<b>" + title + "</b>")
+ experiment.addItems(experiments)
+ experiment.setEditable(True)
+ experiment.setEditText(
+ existing_shortcut.get("experiment", ""))
+
+ priority = QtGui.QSpinBox()
+ grid.addWidget(priority, row, 2)
+ priority.setRange(-99, 99)
+ priority.setValue(existing_shortcut.get("priority", 0))
+
+ pipeline = QtGui.QLineEdit()
+ grid.addWidget(pipeline, row, 3)
+ pipeline.setText(existing_shortcut.get("pipeline", "main"))
+
+ self.edit_widgets[i] = {
+ "experiment": experiment,
+ "priority": priority,
+ "pipeline": pipeline
+ }
+
+ buttons = QtGui.QDialogButtonBox(
+ QtGui.QDialogButtonBox.Ok | QtGui.QDialogButtonBox.Cancel)
+ grid.addWidget(buttons, 14, 0, 1, 4)
+ buttons.accepted.connect(self.accept)
+ buttons.rejected.connect(self.reject)
+ self.accepted.connect(self.on_accept)
+
+ def on_accept(self):
+ for n, widgets in self.edit_widgets.items():
+ self.shortcuts[n] = {
+ "experiment": widgets["experiment"].currentText(),
+ "priority": widgets["priority"].value(),
+ "pipeline": widgets["pipeline"].text()
+ }
+
+
+class ShortcutManager:
+ def __init__(self, main_window, explorer):
+ for i in range(12):
+ shortcut = QShortcut("F" + str(i+1), main_window)
+ shortcut.activated.connect(partial(self._activated, i))
+ self.main_window = main_window
+ self.explorer = explorer
+ self.shortcuts = dict()
+
+ def edit(self, experiments):
+ dlg = _ShortcutEditor(self.main_window, experiments, self.shortcuts)
+ dlg.open()
+
+ def _activated(self, nr):
+ info = self.shortcuts.get(nr, dict())
+ experiment = info.get("experiment", "")
+ if experiment and experiment != "":
+ self.explorer.submit(info["pipeline"], experiment,
+ info["priority"], None, False)
+
+ def save_state(self):
+ return self.shortcuts
+
+ def restore_state(self, state):
+ self.shortcuts = state
diff --git a/artiq/gui/tools.py b/artiq/gui/tools.py
index 242c66600..67c7efb42 100644
--- a/artiq/gui/tools.py
+++ b/artiq/gui/tools.py
@@ -1,22 +1,4 @@
from quamash import QtCore
-import numpy as np
-
-
-def si_prefix(scale):
- try:
- return {
- 1e-12: "p",
- 1e-9: "n",
- 1e-6: "u",
- 1e-3: "m",
- 1.0: "",
- 1e3: "k",
- 1e6: "M",
- 1e9: "G",
- 1e12: "T"
- }[scale]
- except KeyError:
- return "[x{}]".format(scale)
class _SyncSubstruct:
@@ -95,7 +77,7 @@ class DictSyncModel(QtCore.QAbstractTableModel):
new_row = self._find_row(k, v)
if old_row == new_row:
self.dataChanged.emit(self.index(old_row, 0),
- self.index(old_row, len(self.headers)))
+ self.index(old_row, len(self.headers)-1))
else:
self.beginMoveRows(QtCore.QModelIndex(), old_row, old_row,
QtCore.QModelIndex(), new_row)
@@ -157,7 +139,7 @@ class ListSyncModel(QtCore.QAbstractTableModel):
def __setitem__(self, k, v):
self.dataChanged.emit(self.index(k, 0),
- self.index(k, len(self.headers)))
+ self.index(k, len(self.headers)-1))
self.backing_store[k] = v
def __delitem__(self, k):
diff --git a/artiq/language/environment.py b/artiq/language/environment.py
index 8c8f11b82..8628da8d5 100644
--- a/artiq/language/environment.py
+++ b/artiq/language/environment.py
@@ -73,8 +73,7 @@ class NumberValue(_SimpleArgProcessor):
:param unit: A string representing the unit of the value, for user
interface (UI) purposes.
- :param scale: The scale of value for UI purposes. The corresponding SI
- prefix is shown in front of the unit, and the displayed value is
+ :param scale: The scale of value for UI purposes. The displayed value is
divided by the scale.
:param step: The step with which the value should be modified by up/down
buttons in a UI. The default is the scale divided by 10.
@@ -209,9 +208,15 @@ class HasEnvironment:
broadcast=False, persist=False, save=True):
"""Sets the contents and handling modes of a dataset.
+ If the dataset is broadcast, it must be PYON-serializable.
+ If the dataset is saved, it must be a scalar (``bool``, ``int``,
+ ``float`` or NumPy scalar) or a NumPy array.
+
:param broadcast: the data is sent in real-time to the master, which
- dispatches it. Returns a Notifier that can be used to mutate the dataset.
- :param persist: the master should store the data on-disk. Implies broadcast.
+ dispatches it. Returns a Notifier that can be used to mutate the
+ dataset.
+ :param persist: the master should store the data on-disk. Implies
+ broadcast.
:param save: the data is saved into the local storage of the current
run (archived as a HDF5 file).
"""
diff --git a/artiq/language/scan.py b/artiq/language/scan.py
index 7bc5d5665..86f5ac00b 100644
--- a/artiq/language/scan.py
+++ b/artiq/language/scan.py
@@ -140,8 +140,7 @@ class Scannable:
by 10.
:param unit: A string representing the unit of the scanned variable, for user
interface (UI) purposes.
- :param scale: The scale of value for UI purposes. The corresponding SI
- prefix is shown in front of the unit, and the displayed value is
+ :param scale: The scale of value for UI purposes. The displayed value is
divided by the scale.
:param ndecimals: The number of decimals a UI should use.
"""
diff --git a/artiq/master/databases.py b/artiq/master/databases.py
index 40549683c..5e2c7d004 100644
--- a/artiq/master/databases.py
+++ b/artiq/master/databases.py
@@ -32,7 +32,10 @@ class DatasetDB(TaskObject):
self.persist_file = persist_file
self.autosave_period = autosave_period
- file_data = pyon.load_file(self.persist_file)
+ try:
+ file_data = pyon.load_file(self.persist_file)
+ except FileNotFoundError:
+ file_data = dict()
self.data = Notifier({k: (True, v) for k, v in file_data.items()})
def save(self):
diff --git a/artiq/master/log.py b/artiq/master/log.py
index 4388819d8..ae87e015d 100644
--- a/artiq/master/log.py
+++ b/artiq/master/log.py
@@ -1,6 +1,8 @@
import logging
+import logging.handlers
from artiq.protocols.sync_struct import Notifier
+from artiq.protocols.logging import parse_log_message, log_with_name, SourceFilter
class LogBuffer:
@@ -18,88 +20,65 @@ class LogBufferHandler(logging.Handler):
def __init__(self, log_buffer, *args, **kwargs):
logging.Handler.__init__(self, *args, **kwargs)
self.log_buffer = log_buffer
+ self.setFormatter(logging.Formatter("%(name)s:%(message)s"))
def emit(self, record):
message = self.format(record)
- self.log_buffer.log(record.levelno, record.source, record.created, message)
+ for part in message.split("\n"):
+ self.log_buffer.log(record.levelno, record.source, record.created,
+ part)
-name_to_level = {
- "CRITICAL": logging.CRITICAL,
- "ERROR": logging.ERROR,
- "WARN": logging.WARNING,
- "WARNING": logging.WARNING,
- "INFO": logging.INFO,
- "DEBUG": logging.DEBUG,
-}
-
-
-def parse_log_message(msg):
- for name, level in name_to_level.items():
- if msg.startswith(name + ":"):
- remainder = msg[len(name) + 1:]
- try:
- idx = remainder.index(":")
- except:
- continue
- return level, remainder[:idx], remainder[idx+1:]
- return logging.INFO, "print", msg
-
-
-fwd_logger = logging.getLogger("fwd")
-
-
-class LogForwarder:
- def log_worker(self, rid, message):
- level, name, message = parse_log_message(message)
- fwd_logger.name = name
- fwd_logger.log(level, message,
- extra={"source": "worker({})".format(rid)})
- log_worker.worker_pass_rid = True
-
-
-class SourceFilter:
- def __init__(self, master_level):
- self.master_level = master_level
-
- def filter(self, record):
- if not hasattr(record, "source"):
- record.source = "master"
- if record.source == "master":
- return record.levelno >= self.master_level
- else:
- # log messages that are forwarded from a source have already
- # been filtered, and may have a level below the master level.
- return True
+def log_worker(rid, message):
+ level, name, message = parse_log_message(message)
+ log_with_name(name, level, message,
+ extra={"source": "worker({})".format(rid)})
+log_worker.worker_pass_rid = True
def log_args(parser):
- group = parser.add_argument_group("verbosity")
+ group = parser.add_argument_group("logging")
group.add_argument("-v", "--verbose", default=0, action="count",
- help="increase logging level for the master process")
+ help="increase logging level of the master process")
group.add_argument("-q", "--quiet", default=0, action="count",
- help="decrease logging level for the master process")
+ help="decrease logging level of the master process")
+ group.add_argument("--log-file", default="",
+ help="store logs in rotated files; set the "
+ "base filename")
+ group.add_argument("--log-max-size", type=int, default=1024,
+ help="maximum size of each log file in KiB "
+ "(default: %(default)d)")
+ group.add_argument("--log-backup-count", type=int, default=6,
+ help="number of old log files to keep; a dot and index "
+ "are added to the base filename (default: %(default)d)")
def init_log(args):
root_logger = logging.getLogger()
root_logger.setLevel(logging.NOTSET) # we use our custom filter only
- flt = SourceFilter(logging.WARNING + args.quiet*10 - args.verbose*10)
-
+ flt = SourceFilter(logging.WARNING + args.quiet*10 - args.verbose*10,
+ "master")
handlers = []
console_handler = logging.StreamHandler()
- console_handler.setFormatter(logging.Formatter("%(levelname)s:%(source)s:%(name)s:%(message)s"))
+ console_handler.setFormatter(logging.Formatter(
+ "%(levelname)s:%(source)s:%(name)s:%(message)s"))
handlers.append(console_handler)
+
+ if args.log_file:
+ file_handler = logging.handlers.RotatingFileHandler(
+ args.log_file,
+ maxBytes=args.log_max_size*1024,
+ backupCount=args.log_backup_count)
+ file_handler.setFormatter(logging.Formatter(
+ "%(asctime)s %(levelname)s:%(source)s:%(name)s:%(message)s"))
+ handlers.append(file_handler)
log_buffer = LogBuffer(1000)
buffer_handler = LogBufferHandler(log_buffer)
- buffer_handler.setFormatter(logging.Formatter("%(name)s:%(message)s"))
handlers.append(buffer_handler)
for handler in handlers:
handler.addFilter(flt)
root_logger.addHandler(handler)
- log_forwarder = LogForwarder()
-
- return log_buffer, log_forwarder
+ return log_buffer
diff --git a/artiq/master/scheduler.py b/artiq/master/scheduler.py
index de4f80af6..11eb6384e 100644
--- a/artiq/master/scheduler.py
+++ b/artiq/master/scheduler.py
@@ -229,8 +229,8 @@ class PrepareStage(TaskObject):
await run.prepare()
except:
logger.error("got worker exception in prepare stage, "
- "deleting RID %d",
- run.rid, exc_info=True)
+ "deleting RID %d", run.rid)
+ logger.debug("worker exception details", exc_info=True)
self.delete_cb(run.rid)
else:
run.status = RunStatus.prepare_done
@@ -279,8 +279,8 @@ class RunStage(TaskObject):
completed = await run.run()
except:
logger.error("got worker exception in run stage, "
- "deleting RID %d",
- run.rid, exc_info=True)
+ "deleting RID %d", run.rid)
+ logger.debug("worker exception details", exc_info=True)
self.delete_cb(run.rid)
else:
if completed:
@@ -317,8 +317,8 @@ class AnalyzeStage(TaskObject):
await run.write_results()
except:
logger.error("got worker exception in analyze stage, "
- "deleting RID %d",
- run.rid, exc_info=True)
+ "deleting RID %d", run.rid)
+ logger.debug("worker exception details", exc_info=True)
self.delete_cb(run.rid)
else:
self.delete_cb(run.rid)
diff --git a/artiq/master/worker.py b/artiq/master/worker.py
index 59f3f9b4a..f08679239 100644
--- a/artiq/master/worker.py
+++ b/artiq/master/worker.py
@@ -21,10 +21,6 @@ class WorkerWatchdogTimeout(Exception):
pass
-class WorkerException(Exception):
- pass
-
-
class WorkerError(Exception):
pass
@@ -60,13 +56,14 @@ class Worker:
else:
return None
- async def _create_process(self):
+ async def _create_process(self, log_level):
await self.io_lock.acquire()
try:
if self.closed.is_set():
raise WorkerError("Attempting to create process after close")
self.process = await asyncio.create_subprocess_exec(
sys.executable, "-m", "artiq.master.worker_impl",
+ str(log_level),
stdout=subprocess.PIPE, stdin=subprocess.PIPE)
finally:
self.io_lock.release()
@@ -95,19 +92,26 @@ class Worker:
try:
await self._send(obj, cancellable=False)
except:
- logger.warning("failed to send terminate command to worker"
- " (RID %s), killing", self.rid, exc_info=True)
- self.process.kill()
+ logger.debug("failed to send terminate command to worker"
+ " (RID %s), killing", self.rid, exc_info=True)
+ try:
+ self.process.kill()
+ except ProcessLookupError:
+ pass
await self.process.wait()
return
try:
await asyncio.wait_for(self.process.wait(), term_timeout)
except asyncio.TimeoutError:
- logger.warning("worker did not exit (RID %s), killing", self.rid)
- self.process.kill()
+ logger.debug("worker did not exit by itself (RID %s), killing",
+ self.rid)
+ try:
+ self.process.kill()
+ except ProcessLookupError:
+ pass
await self.process.wait()
else:
- logger.debug("worker exited gracefully (RID %s)", self.rid)
+ logger.debug("worker exited by itself (RID %s)", self.rid)
finally:
self.io_lock.release()
@@ -163,10 +167,7 @@ class Worker:
return True
elif action == "pause":
return False
- elif action == "exception":
- raise WorkerException
- del obj["action"]
- if action == "create_watchdog":
+ elif action == "create_watchdog":
func = self.create_watchdog
elif action == "delete_watchdog":
func = self.delete_watchdog
@@ -177,7 +178,7 @@ class Worker:
if getattr(func, "worker_pass_rid", False):
func = partial(func, self.rid)
try:
- data = func(**obj)
+ data = func(*obj["args"], **obj["kwargs"])
reply = {"status": "ok", "data": data}
except:
reply = {"status": "failed",
@@ -208,7 +209,7 @@ class Worker:
async def build(self, rid, pipeline_name, wd, expid, priority, timeout=15.0):
self.rid = rid
- await self._create_process()
+ await self._create_process(expid["log_level"])
await self._worker_action(
{"action": "build",
"rid": rid,
@@ -245,7 +246,7 @@ class Worker:
timeout)
async def examine(self, file, timeout=20.0):
- await self._create_process()
+ await self._create_process(logging.WARNING)
r = dict()
def register(class_name, name, arguments):
r[class_name] = {"name": name, "arguments": arguments}
diff --git a/artiq/master/worker_db.py b/artiq/master/worker_db.py
index b9f76bae0..e490c3396 100644
--- a/artiq/master/worker_db.py
+++ b/artiq/master/worker_db.py
@@ -5,11 +5,11 @@ import os
import time
import re
-import numpy
+import numpy as np
import h5py
from artiq.protocols.sync_struct import Notifier
-from artiq.protocols.pc_rpc import Client, BestEffortClient
+from artiq.protocols.pc_rpc import AutoTarget, Client, BestEffortClient
logger = logging.getLogger(__name__)
@@ -22,11 +22,16 @@ def _create_device(desc, device_mgr):
device_class = getattr(module, desc["class"])
return device_class(device_mgr, **desc["arguments"])
elif ty == "controller":
- if desc["best_effort"]:
- cl = BestEffortClient
+ if desc.get("best_effort", False):
+ cls = BestEffortClient
else:
- cl = Client
- return cl(desc["host"], desc["port"], desc["target_name"])
+ cls = Client
+ # Automatic target can be specified either by the absence of
+ # the target_name parameter, or a None value.
+ target_name = desc.get("target_name", None)
+ if target_name is None:
+ target_name = AutoTarget
+ return cls(desc["host"], desc["port"], target_name)
else:
raise ValueError("Unsupported type in device DB: " + ty)
@@ -114,36 +119,53 @@ def get_last_rid():
_type_to_hdf5 = {
int: h5py.h5t.STD_I64BE,
- float: h5py.h5t.IEEE_F64BE
+ float: h5py.h5t.IEEE_F64BE,
+
+ np.int8: h5py.h5t.STD_I8BE,
+ np.int16: h5py.h5t.STD_I16BE,
+ np.int32: h5py.h5t.STD_I32BE,
+ np.int64: h5py.h5t.STD_I64BE,
+
+ np.uint8: h5py.h5t.STD_U8BE,
+ np.uint16: h5py.h5t.STD_U16BE,
+ np.uint32: h5py.h5t.STD_U32BE,
+ np.uint64: h5py.h5t.STD_U64BE,
+
+ np.float16: h5py.h5t.IEEE_F16BE,
+ np.float32: h5py.h5t.IEEE_F32BE,
+ np.float64: h5py.h5t.IEEE_F64BE
}
def result_dict_to_hdf5(f, rd):
for name, data in rd.items():
- if isinstance(data, list):
- el_ty = type(data[0])
- for d in data:
- if type(d) != el_ty:
- raise TypeError("All list elements must have the same"
- " type for HDF5 output")
- try:
- el_ty_h5 = _type_to_hdf5[el_ty]
- except KeyError:
- raise TypeError("List element type {} is not supported for"
- " HDF5 output".format(el_ty))
- dataset = f.create_dataset(name, (len(data), ), el_ty_h5)
- dataset[:] = data
- elif isinstance(data, numpy.ndarray):
- f.create_dataset(name, data=data)
+ flag = None
+ # beware: isinstance(True/False, int) == True
+ if isinstance(data, bool):
+ data = np.int8(data)
+ flag = "py_bool"
+ elif isinstance(data, int):
+ data = np.int64(data)
+ flag = "py_int"
+
+ if isinstance(data, np.ndarray):
+ dataset = f.create_dataset(name, data=data)
else:
ty = type(data)
- try:
- ty_h5 = _type_to_hdf5[ty]
- except KeyError:
- raise TypeError("Type {} is not supported for HDF5 output"
- .format(ty))
+ if ty is str:
+ ty_h5 = "S{}".format(len(data))
+ data = data.encode()
+ else:
+ try:
+ ty_h5 = _type_to_hdf5[ty]
+ except KeyError:
+ raise TypeError("Type {} is not supported for HDF5 output"
+ .format(ty)) from None
dataset = f.create_dataset(name, (), ty_h5)
dataset[()] = data
+ if flag is not None:
+ dataset.attrs[flag] = np.int8(1)
+
class DatasetManager:
def __init__(self, ddb):
@@ -168,7 +190,8 @@ class DatasetManager:
try:
return self.local[key]
except KeyError:
- return self.ddb.get(key)
+ pass
+ return self.ddb.get(key)
def write_hdf5(self, f):
result_dict_to_hdf5(f, self.local)
diff --git a/artiq/master/worker_impl.py b/artiq/master/worker_impl.py
index 8b22d3acf..722fcb75c 100644
--- a/artiq/master/worker_impl.py
+++ b/artiq/master/worker_impl.py
@@ -1,7 +1,7 @@
import sys
import time
import os
-import traceback
+import logging
from artiq.protocols import pyon
from artiq.tools import file_import
@@ -26,12 +26,9 @@ class ParentActionError(Exception):
pass
-def make_parent_action(action, argnames, exception=ParentActionError):
- argnames = argnames.split()
- def parent_action(*args):
- request = {"action": action}
- for argname, arg in zip(argnames, args):
- request[argname] = arg
+def make_parent_action(action, exception=ParentActionError):
+ def parent_action(*args, **kwargs):
+ request = {"action": action, "args": args, "kwargs": kwargs}
put_object(request)
reply = get_object()
if "action" in reply:
@@ -50,7 +47,7 @@ class LogForwarder:
def __init__(self):
self.buffer = ""
- to_parent = staticmethod(make_parent_action("log", "message"))
+ to_parent = staticmethod(make_parent_action("log"))
def write(self, data):
self.buffer += data
@@ -64,18 +61,18 @@ class LogForwarder:
class ParentDeviceDB:
- get_device_db = make_parent_action("get_device_db", "")
- get = make_parent_action("get_device", "key", KeyError)
+ get_device_db = make_parent_action("get_device_db")
+ get = make_parent_action("get_device", KeyError)
class ParentDatasetDB:
- get = make_parent_action("get_dataset", "key", KeyError)
- update = make_parent_action("update_dataset", "mod")
+ get = make_parent_action("get_dataset", KeyError)
+ update = make_parent_action("update_dataset")
class Watchdog:
- _create = make_parent_action("create_watchdog", "t")
- _delete = make_parent_action("delete_watchdog", "wid")
+ _create = make_parent_action("create_watchdog")
+ _delete = make_parent_action("delete_watchdog")
def __init__(self, t):
self.t = t
@@ -91,15 +88,14 @@ set_watchdog_factory(Watchdog)
class Scheduler:
- pause_noexc = staticmethod(make_parent_action("pause", ""))
+ pause_noexc = staticmethod(make_parent_action("pause"))
def pause(self):
if self.pause_noexc():
raise TerminationRequested
- submit = staticmethod(make_parent_action("scheduler_submit",
- "pipeline_name expid priority due_date flush"))
- cancel = staticmethod(make_parent_action("scheduler_cancel", "rid"))
+ submit = staticmethod(make_parent_action("scheduler_submit"))
+ cancel = staticmethod(make_parent_action("scheduler_cancel"))
def set_run_info(self, pipeline_name, expid, priority):
self.pipeline_name = pipeline_name
@@ -120,22 +116,21 @@ def get_exp(file, class_name):
return getattr(module, class_name)
-register_experiment = make_parent_action("register_experiment",
- "class_name name arguments")
+register_experiment = make_parent_action("register_experiment")
class ExamineDeviceMgr:
- get_device_db = make_parent_action("get_device_db", "")
+ get_device_db = make_parent_action("get_device_db")
- def get(self, name):
+ def get(name):
return None
class DummyDatasetMgr:
- def set(self, key, value, broadcast=False, persist=False, save=True):
+ def set(key, value, broadcast=False, persist=False, save=True):
return None
- def get(self, key):
+ def get(key):
pass
@@ -158,7 +153,9 @@ def examine(device_mgr, dataset_mgr, file):
def main():
- sys.stdout = sys.stderr = LogForwarder()
+ sys.stdout = LogForwarder()
+ sys.stderr = LogForwarder()
+ logging.basicConfig(level=int(sys.argv[1]))
start_time = None
rid = None
@@ -211,15 +208,15 @@ def main():
f.close()
put_object({"action": "completed"})
elif action == "examine":
- examine(ExamineDeviceMgr(), DummyDatasetMgr(), obj["file"])
+ examine(ExamineDeviceMgr, DummyDatasetMgr, obj["file"])
put_object({"action": "completed"})
elif action == "terminate":
break
except:
- traceback.print_exc()
- put_object({"action": "exception"})
+ logging.error("Worker terminating with exception", exc_info=True)
finally:
device_mgr.close_devices()
+
if __name__ == "__main__":
main()
diff --git a/artiq/protocols/logging.py b/artiq/protocols/logging.py
new file mode 100644
index 000000000..e69f76223
--- /dev/null
+++ b/artiq/protocols/logging.py
@@ -0,0 +1,133 @@
+import asyncio
+import logging
+
+from artiq.protocols.asyncio_server import AsyncioServer
+from artiq.tools import TaskObject, workaround_asyncio263
+
+
+logger = logging.getLogger(__name__)
+_fwd_logger = logging.getLogger("fwd")
+
+
+def log_with_name(name, *args, **kwargs):
+ _fwd_logger.name = name
+ _fwd_logger.log(*args, **kwargs)
+
+
+_name_to_level = {
+ "CRITICAL": logging.CRITICAL,
+ "ERROR": logging.ERROR,
+ "WARN": logging.WARNING,
+ "WARNING": logging.WARNING,
+ "INFO": logging.INFO,
+ "DEBUG": logging.DEBUG,
+}
+
+
+def parse_log_message(msg):
+ for name, level in _name_to_level.items():
+ if msg.startswith(name + ":"):
+ remainder = msg[len(name) + 1:]
+ try:
+ idx = remainder.index(":")
+ except:
+ continue
+ return level, remainder[:idx], remainder[idx+1:]
+ return logging.INFO, "print", msg
+
+
+_init_string = b"ARTIQ logging\n"
+
+
+class Server(AsyncioServer):
+ """Remote logging TCP server.
+
+ Takes one log entry per line, in the format:
+ source:levelno:name:message
+ """
+ async def _handle_connection_cr(self, reader, writer):
+ try:
+ line = await reader.readline()
+ if line != _init_string:
+ return
+
+ while True:
+ line = await reader.readline()
+ if not line:
+ break
+ try:
+ line = line.decode()
+ except:
+ return
+ line = line[:-1]
+ linesplit = line.split(":", 3)
+ if len(linesplit) != 4:
+ logger.warning("received improperly formatted message, "
+ "dropping connection")
+ return
+ source, level, name, message = linesplit
+ try:
+ level = int(level)
+ except:
+ logger.warning("received improperly formatted level, "
+ "dropping connection")
+ return
+ log_with_name(name, level, message,
+ extra={"source": source})
+ finally:
+ writer.close()
+
+
+class SourceFilter:
+ def __init__(self, local_level, local_source):
+ self.local_level = local_level
+ self.local_source = local_source
+
+ def filter(self, record):
+ if not hasattr(record, "source"):
+ record.source = self.local_source
+ if record.source == self.local_source:
+ return record.levelno >= self.local_level
+ else:
+ # log messages that are forwarded from a source have already
+ # been filtered, and may have a level below the local level.
+ return True
+
+
+class LogForwarder(logging.Handler, TaskObject):
+ def __init__(self, host, port, reconnect_timer=5.0, queue_size=1000,
+ **kwargs):
+ logging.Handler.__init__(self, **kwargs)
+ self.host = host
+ self.port = port
+ self.setFormatter(logging.Formatter(
+ "%(name)s:%(message)s"))
+ self._queue = asyncio.Queue(queue_size)
+ self.reconnect_timer = reconnect_timer
+
+ def emit(self, record):
+ message = self.format(record)
+ for part in message.split("\n"):
+ part = "{}:{}:{}".format(record.source, record.levelno, part)
+ try:
+ self._queue.put_nowait(part)
+ except asyncio.QueueFull:
+ break
+
+ async def _do(self):
+ while True:
+ try:
+ reader, writer = await asyncio.open_connection(self.host,
+ self.port)
+ writer.write(_init_string)
+ while True:
+ message = await self._queue.get() + "\n"
+ writer.write(message.encode())
+ await workaround_asyncio263()
+ await writer.drain()
+ except asyncio.CancelledError:
+ return
+ except:
+ await asyncio.sleep(self.reconnect_timer)
+ finally:
+ writer.close()
diff --git a/artiq/protocols/pc_rpc.py b/artiq/protocols/pc_rpc.py
index beab88224..d39fbb68b 100644
--- a/artiq/protocols/pc_rpc.py
+++ b/artiq/protocols/pc_rpc.py
@@ -27,6 +27,12 @@ from artiq.protocols.asyncio_server import AsyncioServer as _AsyncioServer
logger = logging.getLogger(__name__)
+class AutoTarget:
+ """Use this as target value in clients for them to automatically connect
+ to the target exposed by the server. Servers must have only one target."""
+ pass
+
+
class RemoteError(Exception):
"""Raised when a RPC failed or raised an exception on the remote (server)
side."""
@@ -42,6 +48,20 @@ class IncompatibleServer(Exception):
_init_string = b"ARTIQ pc_rpc\n"
+def _validate_target_name(target_name, target_names):
+ if target_name is AutoTarget:
+ if len(target_names) > 1:
+ raise ValueError("Server has multiple targets: " +
+ " ".join(sorted(target_names)))
+ else:
+ target_name = target_names[0]
+ elif target_name not in target_names:
+ raise IncompatibleServer(
+ "valid target name(s): " +
+ " ".join(sorted(target_names)))
+ return target_name
+
+
class Client:
"""This class proxies the methods available on the server so that they
can be used as if they were local methods.
@@ -67,11 +87,13 @@ class Client:
:param port: TCP port to use.
:param target_name: Target name to select. ``IncompatibleServer`` is
raised if the target does not exist.
+ Use ``AutoTarget`` for automatic selection if the server has only one
+ target.
Use ``None`` to skip selecting a target. The list of targets can then
be retrieved using ``get_rpc_id`` and then one can be selected later
using ``select_rpc_target``.
"""
- def __init__(self, host, port, target_name):
+ def __init__(self, host, port, target_name=AutoTarget):
self.__socket = socket.create_connection((host, port))
try:
@@ -89,8 +111,7 @@ class Client:
def select_rpc_target(self, target_name):
"""Selects a RPC target by name. This function should be called
exactly once if the object was created with ``target_name=None``."""
- if target_name not in self.__target_names:
- raise IncompatibleServer
+ target_name = _validate_target_name(target_name, self.__target_names)
self.__socket.sendall((target_name + "\n").encode())
def get_rpc_id(self):
@@ -180,8 +201,7 @@ class AsyncioClient:
"""Selects a RPC target by name. This function should be called
exactly once if the connection was created with ``target_name=None``.
"""
- if target_name not in self.__target_names:
- raise IncompatibleServer
+ target_name = _validate_target_name(target_name, self.__target_names)
self.__writer.write((target_name + "\n").encode())
def get_rpc_id(self):
@@ -259,7 +279,8 @@ class BestEffortClient:
except:
logger.warning("first connection attempt to %s:%d[%s] failed, "
"retrying in the background",
- self.__host, self.__port, self.__target_name)
+ self.__host, self.__port, self.__target_name,
+ exc_info=True)
self.__start_conretry()
else:
self.__conretry_thread = None
@@ -273,9 +294,9 @@ class BestEffortClient:
(self.__host, self.__port), timeout)
self.__socket.sendall(_init_string)
server_identification = self.__recv()
- if self.__target_name not in server_identification["targets"]:
- raise IncompatibleServer
- self.__socket.sendall((self.__target_name + "\n").encode())
+ target_name = _validate_target_name(self.__target_name,
+ server_identification["targets"])
+ self.__socket.sendall((target_name + "\n").encode())
def __start_conretry(self):
self.__conretry_thread = threading.Thread(target=self.__conretry)
diff --git a/artiq/protocols/pyon.py b/artiq/protocols/pyon.py
index 471025b43..ee50af99f 100644
--- a/artiq/protocols/pyon.py
+++ b/artiq/protocols/pyon.py
@@ -132,7 +132,10 @@ class _Encoder:
return r
def encode(self, x):
- return getattr(self, "encode_" + _encode_map[type(x)])(x)
+ ty = _encode_map.get(type(x), None)
+ if ty is None:
+ raise TypeError(repr(x) + " is not PYON serializable")
+ return getattr(self, "encode_" + ty)(x)
def encode(x, pretty=False):
@@ -145,6 +148,7 @@ def encode(x, pretty=False):
def _nparray(shape, dtype, data):
a = numpy.frombuffer(base64.b64decode(data), dtype=dtype)
+ a = a.copy()
return a.reshape(shape)
diff --git a/artiq/protocols/sync_struct.py b/artiq/protocols/sync_struct.py
index e2c1021ad..e42534cdb 100644
--- a/artiq/protocols/sync_struct.py
+++ b/artiq/protocols/sync_struct.py
@@ -16,6 +16,7 @@ from functools import partial
from artiq.protocols import pyon
from artiq.protocols.asyncio_server import AsyncioServer
+from artiq.tools import workaround_asyncio263
_init_string = b"ARTIQ sync_struct\n"
@@ -233,10 +234,11 @@ class Publisher(AsyncioServer):
line = await queue.get()
writer.write(line)
# raise exception on connection error
+ await workaround_asyncio263()
await writer.drain()
finally:
self._recipients[notifier_name].remove(queue)
- except ConnectionResetError:
+ except (ConnectionResetError, BrokenPipeError):
# subscribers disconnecting are a normal occurence
pass
finally:
diff --git a/artiq/test/h5types.py b/artiq/test/h5types.py
new file mode 100644
index 000000000..4a3b1ca19
--- /dev/null
+++ b/artiq/test/h5types.py
@@ -0,0 +1,25 @@
+import unittest
+
+import h5py
+import numpy as np
+
+from artiq.master.worker_db import result_dict_to_hdf5
+
+
+class TypesCase(unittest.TestCase):
+ def test_types(self):
+ d = {
+ "bool": True,
+ "int": 42,
+ "float": 42.0,
+ "string": "abcdef",
+ }
+
+ for size in 8, 16, 32, 64:
+ d["i"+str(size)] = getattr(np, "int" + str(size))(42)
+ d["u"+str(size)] = getattr(np, "uint" + str(size))(42)
+ for size in 16, 32, 64:
+ d["f"+str(size)] = getattr(np, "float" + str(size))(42)
+
+ with h5py.File("h5types.h5", "w") as f:
+ result_dict_to_hdf5(f, d)
diff --git a/artiq/test/pc_rpc.py b/artiq/test/pc_rpc.py
index 19000c659..423b4f31d 100644
--- a/artiq/test/pc_rpc.py
+++ b/artiq/test/pc_rpc.py
@@ -17,12 +17,12 @@ test_object = [5, 2.1, None, True, False,
class RPCCase(unittest.TestCase):
- def _run_server_and_test(self, test):
+ def _run_server_and_test(self, test, *args):
# running this file outside of unittest starts the echo server
with subprocess.Popen([sys.executable,
sys.modules[__name__].__file__]) as proc:
try:
- test()
+ test(*args)
finally:
try:
proc.wait(timeout=1)
@@ -30,12 +30,12 @@ class RPCCase(unittest.TestCase):
proc.kill()
raise
- def _blocking_echo(self):
+ def _blocking_echo(self, target):
for attempt in range(100):
time.sleep(.2)
try:
remote = pc_rpc.Client(test_address, test_port,
- "test")
+ target)
except ConnectionRefusedError:
pass
else:
@@ -50,14 +50,17 @@ class RPCCase(unittest.TestCase):
remote.close_rpc()
def test_blocking_echo(self):
- self._run_server_and_test(self._blocking_echo)
+ self._run_server_and_test(self._blocking_echo, "test")
- async def _asyncio_echo(self):
+ def test_blocking_echo_autotarget(self):
+ self._run_server_and_test(self._blocking_echo, pc_rpc.AutoTarget)
+
+ async def _asyncio_echo(self, target):
remote = pc_rpc.AsyncioClient()
for attempt in range(100):
await asyncio.sleep(.2)
try:
- await remote.connect_rpc(test_address, test_port, "test")
+ await remote.connect_rpc(test_address, test_port, target)
except ConnectionRefusedError:
pass
else:
@@ -71,16 +74,19 @@ class RPCCase(unittest.TestCase):
finally:
remote.close_rpc()
- def _loop_asyncio_echo(self):
+ def _loop_asyncio_echo(self, target):
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
- loop.run_until_complete(self._asyncio_echo())
+ loop.run_until_complete(self._asyncio_echo(target))
finally:
loop.close()
def test_asyncio_echo(self):
- self._run_server_and_test(self._loop_asyncio_echo)
+ self._run_server_and_test(self._loop_asyncio_echo, "test")
+
+ def test_asyncio_echo_autotarget(self):
+ self._run_server_and_test(self._loop_asyncio_echo, pc_rpc.AutoTarget)
class FireAndForgetCase(unittest.TestCase):
diff --git a/artiq/test/scheduler.py b/artiq/test/scheduler.py
index c8d29759b..f6a029ee5 100644
--- a/artiq/test/scheduler.py
+++ b/artiq/test/scheduler.py
@@ -1,4 +1,5 @@
import unittest
+import logging
import asyncio
import sys
import os
@@ -32,6 +33,7 @@ class BackgroundExperiment(EnvExperiment):
def _get_expid(name):
return {
+ "log_level": logging.WARNING,
"file": sys.modules[__name__].__file__,
"class_name": name,
"arguments": dict()
diff --git a/artiq/test/sync_struct.py b/artiq/test/sync_struct.py
index ece90360b..e5e950535 100644
--- a/artiq/test/sync_struct.py
+++ b/artiq/test/sync_struct.py
@@ -17,15 +17,17 @@ def write_test_data(test_dict):
for key, value in enumerate(test_values):
test_dict[key] = value
test_dict[1.5] = 1.5
- test_dict["array"] = []
- test_dict["array"].append(42)
- test_dict["array"].insert(1, 1)
+ test_dict["list"] = []
+ test_dict["list"].append(42)
+ test_dict["list"].insert(1, 1)
test_dict[100] = 0
test_dict[100] = 1
test_dict[101] = 1
test_dict.pop(101)
test_dict[102] = 1
del test_dict[102]
+ test_dict["array"] = np.zeros(1)
+ test_dict["array"][0] = 10
test_dict["finished"] = True
diff --git a/artiq/test/worker.py b/artiq/test/worker.py
index 59847f6a5..c74b6f46a 100644
--- a/artiq/test/worker.py
+++ b/artiq/test/worker.py
@@ -1,4 +1,5 @@
import unittest
+import logging
import asyncio
import sys
import os
@@ -64,6 +65,7 @@ async def _call_worker(worker, expid):
def _run_experiment(class_name):
expid = {
+ "log_level": logging.WARNING,
"file": sys.modules[__name__].__file__,
"class_name": class_name,
"arguments": dict()
@@ -85,7 +87,7 @@ class WorkerCase(unittest.TestCase):
_run_experiment("SimpleExperiment")
def test_exception(self):
- with self.assertRaises(WorkerException):
+ with self.assertRaises(WorkerError):
_run_experiment("ExceptionTermination")
def test_watchdog_no_timeout(self):
diff --git a/artiq/tools.py b/artiq/tools.py
index 185e79fd8..2b634ac54 100644
--- a/artiq/tools.py
+++ b/artiq/tools.py
@@ -49,14 +49,16 @@ def short_format(v):
if v is None:
return "None"
t = type(v)
- if np.issubdtype(t, int) or np.issubdtype(t, float):
+ if t is bool or np.issubdtype(t, int) or np.issubdtype(t, float):
return str(v)
elif t is str:
- return "\"" + elide(v, 15) + "\""
+ return "\"" + elide(v, 50) + "\""
else:
r = t.__name__
if t is list or t is dict or t is set:
r += " ({})".format(len(v))
+ if t is np.ndarray:
+ r += " " + str(np.shape(v))
return r
@@ -175,3 +177,9 @@ class Condition:
for fut in self._waiters:
if not fut.done():
fut.set_result(False)
+
+
+# See: https://github.com/python/asyncio/issues/263
+@asyncio.coroutine
+def workaround_asyncio263():
+ yield
diff --git a/conda/README.md b/conda/README.md
deleted file mode 100644
index 017742a8b..000000000
--- a/conda/README.md
+++ /dev/null
@@ -1,17 +0,0 @@
-Uploading conda packages (Python 3.5)
-=====================================
-
-Preparing:
-
- 1. [Install miniconda][miniconda]
- 2. `conda update -q conda`
- 3. `conda install conda-build jinja2 anaconda`
- 4. `conda create -q -n py35 python=3.5`
- 5. `conda config --add channels https://conda.anaconda.org/m-labs/channel/dev`
-
-Building:
-
- 1. `conda build pkgname --python 3.5`; this command displays a path to the freshly built package
- 2. `anaconda upload -c main -c dev`
-
-[miniconda]: http://conda.pydata.org/docs/install/quick.html#linux-miniconda-install
diff --git a/conda/aiohttp/bld.bat b/conda/aiohttp/bld.bat
deleted file mode 100644
index c40a9bbef..000000000
--- a/conda/aiohttp/bld.bat
+++ /dev/null
@@ -1,2 +0,0 @@
-"%PYTHON%" setup.py install
-if errorlevel 1 exit 1
diff --git a/conda/aiohttp/build.sh b/conda/aiohttp/build.sh
deleted file mode 100644
index 8e25a1455..000000000
--- a/conda/aiohttp/build.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-$PYTHON setup.py install
diff --git a/conda/aiohttp/meta.yaml b/conda/aiohttp/meta.yaml
deleted file mode 100644
index 2b196ffc1..000000000
--- a/conda/aiohttp/meta.yaml
+++ /dev/null
@@ -1,36 +0,0 @@
-package:
- name: aiohttp
- version: "0.17.2"
-
-source:
- fn: aiohttp-0.17.2.tar.gz
- url: https://pypi.python.org/packages/source/a/aiohttp/aiohttp-0.17.2.tar.gz
- md5: 7640928fd4b5c1ccf1f8bcad276d39d6
-
-build:
- number: 0
-
-requirements:
- build:
- - python
- - setuptools
- - chardet
-
- run:
- - python
- - chardet
-
-test:
- # Python imports
- imports:
- - aiohttp
-
- requires:
- - chardet
- - gunicorn # [not win]
- - nose
-
-about:
- home: https://github.com/KeepSafe/aiohttp/
- license: Apache Software License
- summary: 'http client/server for asyncio'
diff --git a/conda/artiq-kc705-nist_qc1/build.sh b/conda/artiq-kc705-nist_qc1/build.sh
new file mode 100644
index 000000000..495a0e74c
--- /dev/null
+++ b/conda/artiq-kc705-nist_qc1/build.sh
@@ -0,0 +1,26 @@
+#!/bin/bash
+
+BUILD_SETTINGS_FILE=$HOME/.m-labs/build_settings.sh
+[ -f $BUILD_SETTINGS_FILE ] && . $BUILD_SETTINGS_FILE
+
+SOC_PREFIX=$PREFIX/lib/python3.5/site-packages/artiq/binaries/kc705
+mkdir -p $SOC_PREFIX/nist_qc1
+
+SOC_ROOT=$PWD/soc
+
+# build bitstream
+
+(cd $MSCDIR; $PYTHON make.py -X $SOC_ROOT -t artiq_kc705 $MISOC_EXTRA_VIVADO_CMDLINE build-bitstream)
+cp $MSCDIR/build/artiq_kc705-nist_qc1-kc705.bit $SOC_PREFIX/
+wget http://sionneau.net/artiq/binaries/kc705/flash_proxy/bscan_spi_kc705.bit
+mv bscan_spi_kc705.bit $SOC_PREFIX/
+
+# build BIOS
+
+(cd $MSCDIR; $PYTHON make.py -X $SOC_ROOT -t artiq_kc705 build-headers build-bios)
+cp $MSCDIR/software/bios/bios.bin $SOC_PREFIX/
+
+# build runtime
+
+make -C soc/runtime clean runtime.fbi
+cp soc/runtime/runtime.fbi $SOC_PREFIX/nist_qc1/
diff --git a/conda/artiq-kc705-nist_qc1/meta.yaml b/conda/artiq-kc705-nist_qc1/meta.yaml
new file mode 100644
index 000000000..765cb2c96
--- /dev/null
+++ b/conda/artiq-kc705-nist_qc1/meta.yaml
@@ -0,0 +1,27 @@
+package:
+ name: artiq-kc705-nist_qc1
+ version: {{ environ.get("GIT_DESCRIBE_TAG", "") }}
+
+source:
+ git_url: https://github.com/m-labs/artiq
+ git_tag: master
+
+build:
+ noarch_python: true
+ number: {{ environ.get("GIT_DESCRIBE_NUMBER", 0) }}
+ string: py_{{ environ.get("GIT_DESCRIBE_NUMBER", 0) }}+git{{ environ.get("GIT_DESCRIBE_HASH", "")[1:] }}
+
+requirements:
+ build:
+ # We don't get meaningful GIT_DESCRIBE_* values until after conda installs build dependencies.
+ - artiq 0.0
+ - migen 0.0
+ - llvm-or1k
+ - binutils-or1k-linux
+ run:
+ - artiq 0.0
+
+about:
+ home: http://m-labs.hk/artiq
+ license: GPL
+ summary: 'Bitstream, BIOS and runtime for NIST_QC1 on the KC705 board'
diff --git a/conda/artiq-kc705-nist_qc2/build.sh b/conda/artiq-kc705-nist_qc2/build.sh
new file mode 100644
index 000000000..a65294b85
--- /dev/null
+++ b/conda/artiq-kc705-nist_qc2/build.sh
@@ -0,0 +1,26 @@
+#!/bin/bash
+
+BUILD_SETTINGS_FILE=$HOME/.m-labs/build_settings.sh
+[ -f $BUILD_SETTINGS_FILE ] && . $BUILD_SETTINGS_FILE
+
+SOC_PREFIX=$PREFIX/lib/python3.5/site-packages/artiq/binaries/kc705
+mkdir -p $SOC_PREFIX/nist_qc2
+
+SOC_ROOT=$PWD/soc
+
+# build bitstream
+
+(cd $MSCDIR; $PYTHON make.py -X $SOC_ROOT -t artiq_kc705 -s NIST_QC2 $MISOC_EXTRA_VIVADO_CMDLINE build-bitstream)
+cp $MSCDIR/build/artiq_kc705-nist_qc2-kc705.bit $SOC_PREFIX/
+wget http://sionneau.net/artiq/binaries/kc705/flash_proxy/bscan_spi_kc705.bit
+mv bscan_spi_kc705.bit $SOC_PREFIX/
+
+# build BIOS
+
+(cd $MSCDIR; $PYTHON make.py -X $SOC_ROOT -t artiq_kc705 -s NIST_QC2 build-headers build-bios)
+cp $MSCDIR/software/bios/bios.bin $SOC_PREFIX/
+
+# build runtime
+
+make -C soc/runtime clean runtime.fbi
+cp soc/runtime/runtime.fbi $SOC_PREFIX/nist_qc2/
diff --git a/conda/artiq-kc705-nist_qc2/meta.yaml b/conda/artiq-kc705-nist_qc2/meta.yaml
new file mode 100644
index 000000000..166163a1c
--- /dev/null
+++ b/conda/artiq-kc705-nist_qc2/meta.yaml
@@ -0,0 +1,27 @@
+package:
+ name: artiq-kc705-nist_qc2
+ version: {{ environ.get("GIT_DESCRIBE_TAG", "") }}
+
+source:
+ git_url: https://github.com/m-labs/artiq
+ git_tag: master
+
+build:
+ noarch_python: true
+ number: {{ environ.get("GIT_DESCRIBE_NUMBER", 0) }}
+ string: py_{{ environ.get("GIT_DESCRIBE_NUMBER", 0) }}+git{{ environ.get("GIT_DESCRIBE_HASH", "")[1:] }}
+
+requirements:
+ build:
+ # We don't get meaningful GIT_DESCRIBE_* values until after conda installs build dependencies.
+ - artiq 0.0
+ - migen 0.0
+ - llvm-or1k
+ - binutils-or1k-linux
+ run:
+ - artiq 0.0
+
+about:
+ home: http://m-labs.hk/artiq
+ license: GPL
+ summary: 'Bitstream, BIOS and runtime for NIST_QC2 on the KC705 board'
diff --git a/conda/artiq-pipistrello-nist_qc1/build.sh b/conda/artiq-pipistrello-nist_qc1/build.sh
new file mode 100644
index 000000000..f35b22a39
--- /dev/null
+++ b/conda/artiq-pipistrello-nist_qc1/build.sh
@@ -0,0 +1,26 @@
+#!/bin/bash
+
+BUILD_SETTINGS_FILE=$HOME/.m-labs/build_settings.sh
+[ -f $BUILD_SETTINGS_FILE ] && . $BUILD_SETTINGS_FILE
+
+SOC_PREFIX=$PREFIX/lib/python3.5/site-packages/artiq/binaries/pipistrello
+mkdir -p $SOC_PREFIX
+
+SOC_ROOT=$PWD/soc
+
+# build bitstream
+
+(cd $MSCDIR; $PYTHON make.py -X $SOC_ROOT -t artiq_pipistrello $MISOC_EXTRA_ISE_CMDLINE build-bitstream)
+cp $MSCDIR/build/artiq_pipistrello-nist_qc1-pipistrello.bit $SOC_PREFIX/
+wget https://people.phys.ethz.ch/~robertjo/bscan_spi_lx45_csg324.bit
+mv bscan_spi_lx45_csg324.bit $SOC_PREFIX/
+
+# build BIOS
+
+(cd $MSCDIR; $PYTHON make.py -X $SOC_ROOT -t artiq_pipistrello build-headers build-bios)
+cp $MSCDIR/software/bios/bios.bin $SOC_PREFIX/
+
+# build runtime
+
+make -C soc/runtime clean runtime.fbi
+cp soc/runtime/runtime.fbi $SOC_PREFIX/
diff --git a/conda/artiq-pipistrello-nist_qc1/meta.yaml b/conda/artiq-pipistrello-nist_qc1/meta.yaml
new file mode 100644
index 000000000..11a62058a
--- /dev/null
+++ b/conda/artiq-pipistrello-nist_qc1/meta.yaml
@@ -0,0 +1,27 @@
+package:
+ name: artiq-pipistrello-nist_qc1
+ version: {{ environ.get("GIT_DESCRIBE_TAG", "") }}
+
+source:
+ git_url: https://github.com/m-labs/artiq
+ git_tag: master
+
+build:
+ noarch_python: true
+ number: {{ environ.get("GIT_DESCRIBE_NUMBER", 0) }}
+ string: py_{{ environ.get("GIT_DESCRIBE_NUMBER", 0) }}+git{{ environ.get("GIT_DESCRIBE_HASH", "")[1:] }}
+
+requirements:
+ build:
+ # We don't get meaningful GIT_DESCRIBE_* values until after conda installs build dependencies.
+ - artiq 0.0
+ - migen 0.0
+ - llvm-or1k
+ - binutils-or1k-linux
+ run:
+ - artiq 0.0
+
+about:
+ home: http://m-labs.hk/artiq
+ license: GPL
+ summary: 'Bitstream, BIOS and runtime for NIST_QC1 on the Pipistrello board'
diff --git a/conda/artiq/build.sh b/conda/artiq/build.sh
index 4d574de48..1bbc544de 100755
--- a/conda/artiq/build.sh
+++ b/conda/artiq/build.sh
@@ -1,64 +1,15 @@
#!/bin/bash
-BUILD_SETTINGS_FILE=$HOME/.mlabs/build_settings.sh
-
-if [ -f $BUILD_SETTINGS_FILE ]
-then
- source $BUILD_SETTINGS_FILE
-fi
+ARTIQ_PREFIX=$PREFIX/lib/python3.5/site-packages/artiq
$PYTHON setup.py install --single-version-externally-managed --record=record.txt
-git clone --recursive https://github.com/m-labs/misoc
-export MSCDIR=$SRC_DIR/misoc
-ARTIQ_PREFIX=$PREFIX/lib/python3.5/site-packages/artiq
-BIN_PREFIX=$ARTIQ_PREFIX/binaries/
-mkdir -p $ARTIQ_PREFIX/misc
-mkdir -p $BIN_PREFIX/kc705 $BIN_PREFIX/pipistrello
-
-# build for KC705 NIST_QC1
-
-cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_kc705 build-headers build-bios; cd -
-make -C soc/runtime clean runtime.fbi
-[ "$BUILD_SOC" != "0" ] && (cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_kc705 $MISOC_EXTRA_VIVADO_CMDLINE build-bitstream)
-
-# install KC705 NIST_QC1 binaries
-
-mkdir -p $BIN_PREFIX/kc705/nist_qc1
-cp soc/runtime/runtime.fbi $BIN_PREFIX/kc705/nist_qc1/
-cp $SRC_DIR/misoc/software/bios/bios.bin $BIN_PREFIX/kc705/
-[ "$BUILD_SOC" != "0" ] && cp $SRC_DIR/misoc/build/artiq_kc705-nist_qc1-kc705.bit $BIN_PREFIX/kc705/
-wget http://sionneau.net/artiq/binaries/kc705/flash_proxy/bscan_spi_kc705.bit
-mv bscan_spi_kc705.bit $BIN_PREFIX/kc705/
-
-# build for Pipistrello
-
-cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_pipistrello build-headers build-bios; cd -
-make -C soc/runtime clean runtime.fbi
-[ "$BUILD_SOC" != "0" ] && (cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_pipistrello $MISOC_EXTRA_ISE_CMDLINE build-bitstream)
-
-# install Pipistrello binaries
-
-cp soc/runtime/runtime.fbi $BIN_PREFIX/pipistrello/
-cp $SRC_DIR/misoc/software/bios/bios.bin $BIN_PREFIX/pipistrello/
-[ "$BUILD_SOC" != "0" ] && cp $SRC_DIR/misoc/build/artiq_pipistrello-nist_qc1-pipistrello.bit $BIN_PREFIX/pipistrello/
-wget https://people.phys.ethz.ch/~robertjo/bscan_spi_lx45_csg324.bit
-mv bscan_spi_lx45_csg324.bit $BIN_PREFIX/pipistrello/
-
-# build for KC705 NIST_QC2
-
-cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_kc705 -s NIST_QC2 build-headers; cd -
-make -C soc/runtime clean runtime.fbi
-[ "$BUILD_SOC" != "0" ] && (cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_kc705 -s NIST_QC2 $MISOC_EXTRA_VIVADO_CMDLINE build-bitstream)
-
-# install KC705 NIST_QC2 binaries
-
-mkdir -p $BIN_PREFIX/kc705/nist_qc2
-cp soc/runtime/runtime.fbi $BIN_PREFIX/kc705/nist_qc2/
-[ "$BUILD_SOC" != "0" ] && cp $SRC_DIR/misoc/build/artiq_kc705-nist_qc2-kc705.bit $BIN_PREFIX/kc705/
+# install scripts
cp artiq/frontend/artiq_flash.sh $PREFIX/bin
-# misc
+# install udev rules
+
+mkdir -p $ARTIQ_PREFIX/misc
cp misc/99-papilio.rules $ARTIQ_PREFIX/misc/
cp misc/99-kc705.rules $ARTIQ_PREFIX/misc/
diff --git a/conda/artiq/meta.yaml b/conda/artiq/meta.yaml
index e96b8d4bb..9fa02839d 100644
--- a/conda/artiq/meta.yaml
+++ b/conda/artiq/meta.yaml
@@ -1,65 +1,66 @@
package:
- name: artiq
- version: {{ environ.get("GIT_DESCRIBE_TAG", "") }}
+ name: artiq
+ version: {{ environ.get("GIT_DESCRIBE_TAG", "") }}
source:
- git_url: https://github.com/m-labs/artiq
- git_tag: master
+ git_url: https://github.com/m-labs/artiq
+ git_tag: master
build:
- number: {{ environ.get("GIT_DESCRIBE_NUMBER", 0) }}
- entry_points:
- - artiq_client = artiq.frontend.artiq_client:main
- - artiq_compile = artiq.frontend.artiq_compile:main
- - artiq_coretool = artiq.frontend.artiq_coretool:main
- - artiq_ctlmgr = artiq.frontend.artiq_ctlmgr:main
- - artiq_gui = artiq.frontend.artiq_gui:main
- - artiq_influxdb = artiq.frontend.artiq_influxdb:main
- - artiq_master = artiq.frontend.artiq_master:main
- - artiq_mkfs = artiq.frontend.artiq_mkfs:main
- - artiq_rpctool = artiq.frontend.artiq_rpctool:main
- - artiq_run = artiq.frontend.artiq_run:main
- - lda_controller = artiq.frontend.lda_controller:main
- - novatech409b_controller = artiq.frontend.novatech409b_controller:main
- - pdq2_client = artiq.frontend.pdq2_client:main
- - pdq2_controller = artiq.frontend.pdq2_controller:main
- - pxi6733_controller = artiq.frontend.pxi6733_controller:main
- - thorlabs_tcube_controller = artiq.frontend.thorlabs_tcube_controller:main
+ noarch_python: true
+ number: {{ environ.get("GIT_DESCRIBE_NUMBER", 0) }}
+ string: py_{{ environ.get("GIT_DESCRIBE_NUMBER", 0) }}+git{{ environ.get("GIT_DESCRIBE_HASH", "")[1:] }}
+ entry_points:
+ - artiq_client = artiq.frontend.artiq_client:main
+ - artiq_compile = artiq.frontend.artiq_compile:main
+ - artiq_coretool = artiq.frontend.artiq_coretool:main
+ - artiq_ctlmgr = artiq.frontend.artiq_ctlmgr:main
+ - artiq_gui = artiq.frontend.artiq_gui:main
+ - artiq_influxdb = artiq.frontend.artiq_influxdb:main
+ - artiq_master = artiq.frontend.artiq_master:main
+ - artiq_mkfs = artiq.frontend.artiq_mkfs:main
+ - artiq_rpctool = artiq.frontend.artiq_rpctool:main
+ - artiq_run = artiq.frontend.artiq_run:main
+ - lda_controller = artiq.frontend.lda_controller:main
+ - novatech409b_controller = artiq.frontend.novatech409b_controller:main
+ - pdq2_client = artiq.frontend.pdq2_client:main
+ - pdq2_controller = artiq.frontend.pdq2_controller:main
+ - pxi6733_controller = artiq.frontend.pxi6733_controller:main
+ - thorlabs_tcube_controller = artiq.frontend.thorlabs_tcube_controller:main
requirements:
- build:
- - python >=3.5.0
- - setuptools
- - numpy
- - migen 0.0
- - pyelftools
- - binutils-or1k-linux
- run:
- - python >=3.5.0
- - llvmlite-artiq
- - scipy
- - numpy
- - prettytable
- - pyserial
- - sphinx
- - sphinx-argparse
- - h5py
- - dateutil
- - pydaqmx
- - pyelftools
- - quamash
- - pyqtgraph
- - flterm # [linux]
- - pygit2
- - aiohttp
- - binutils-or1k-linux
+ build:
+ - python >=3.5.0
+ - setuptools
+ - numpy
+ - migen 0.0
+ - pyelftools
+ - binutils-or1k-linux
+ run:
+ - python >=3.5.0
+ - llvmlite-artiq
+ - scipy
+ - numpy
+ - prettytable
+ - pyserial
+ - sphinx
+ - sphinx-argparse
+ - h5py
+ - dateutil
+ - pydaqmx
+ - pyelftools
+ - quamash
+ - pyqtgraph
+ - flterm # [linux]
+ - pygit2
+ - aiohttp
+ - binutils-or1k-linux
test:
- imports:
- - artiq
-
+ imports:
+ - artiq
about:
- home: http://m-labs.hk/artiq
- license: 3-clause BSD
- summary: 'ARTIQ (Advanced Real-Time Infrastructure for Quantum physics) is a next-generation control system for quantum information experiments. It is being developed in partnership with the Ion Storage Group at NIST, and its applicability reaches beyond ion trapping.'
+ home: http://m-labs.hk/artiq
+ license: GPL
+ summary: 'ARTIQ (Advanced Real-Time Infrastructure for Quantum physics) is a next-generation control system for quantum information experiments. It is being developed in partnership with the Ion Storage Group at NIST, and its applicability reaches beyond ion trapping.'
diff --git a/conda/binutils-or1k-linux/README.md b/conda/binutils-or1k-linux/README.md
deleted file mode 100755
index d812cc7b2..000000000
--- a/conda/binutils-or1k-linux/README.md
+++ /dev/null
@@ -1,8 +0,0 @@
-binutils-or1k-linux
-===================
-
-To build this package on Windows:
-
-* Install cygwin
-* Install the following packages: gcc-core g++-core make texinfo patch
-* Run cygwin terminal and execute $ conda build binutils-or1k-linux
\ No newline at end of file
diff --git a/conda/binutils-or1k-linux/bld.bat b/conda/binutils-or1k-linux/bld.bat
deleted file mode 100644
index 6c709129f..000000000
--- a/conda/binutils-or1k-linux/bld.bat
+++ /dev/null
@@ -1,10 +0,0 @@
-FOR /F "tokens=* USEBACKQ" %%F IN (`cygpath -u %PREFIX%`) DO (
-SET var=%%F
-)
-set PREFIX=%var%
-FOR /F "tokens=* USEBACKQ" %%F IN (`cygpath -u %RECIPE_DIR%`) DO (
-SET var=%%F
-)
-set RECIPE_DIR=%var%
-sh %RECIPE_DIR%/build.sh
-if errorlevel 1 exit 1
diff --git a/conda/binutils-or1k-linux/build.sh b/conda/binutils-or1k-linux/build.sh
deleted file mode 100755
index faa6aa8e4..000000000
--- a/conda/binutils-or1k-linux/build.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-patch -p1 < $RECIPE_DIR/../../misc/binutils-2.25.1-or1k-R_PCREL-pcrel_offset.patch
-mkdir build
-cd build
-../configure --target=or1k-linux --prefix=$PREFIX
-make -j2
-make install
diff --git a/conda/binutils-or1k-linux/meta.yaml b/conda/binutils-or1k-linux/meta.yaml
deleted file mode 100644
index d8e8f9e71..000000000
--- a/conda/binutils-or1k-linux/meta.yaml
+++ /dev/null
@@ -1,20 +0,0 @@
-package:
- name: binutils-or1k-linux
- version: 2.25.1
-
-source:
- fn: binutils-2.25.1.tar.bz2
- url: https://ftp.gnu.org/gnu/binutils/binutils-2.25.1.tar.bz2
- sha256: b5b14added7d78a8d1ca70b5cb75fef57ce2197264f4f5835326b0df22ac9f22
-
-build:
- number: 0
-
-requirements:
- build:
- - system # [not win]
-
-about:
- home: https://www.gnu.org/software/binutils/
- license: GPL
- summary: 'A set of programming tools for creating and managing binary programs, object files, libraries, profile data, and assembly source code.'
diff --git a/conda/chardet/bld.bat b/conda/chardet/bld.bat
deleted file mode 100644
index c40a9bbef..000000000
--- a/conda/chardet/bld.bat
+++ /dev/null
@@ -1,2 +0,0 @@
-"%PYTHON%" setup.py install
-if errorlevel 1 exit 1
diff --git a/conda/chardet/build.sh b/conda/chardet/build.sh
deleted file mode 100644
index 5a5aeeb48..000000000
--- a/conda/chardet/build.sh
+++ /dev/null
@@ -1 +0,0 @@
-$PYTHON setup.py install
diff --git a/conda/chardet/meta.yaml b/conda/chardet/meta.yaml
deleted file mode 100644
index e9b7c795c..000000000
--- a/conda/chardet/meta.yaml
+++ /dev/null
@@ -1,33 +0,0 @@
-package:
- name: chardet
- version: 2.2.1
-
-source:
- fn: chardet-2.2.1.tar.gz
- url: https://pypi.python.org/packages/source/c/chardet/chardet-2.2.1.tar.gz
- md5: 4a758402eaefd0331bdedc7ecb6f452c
-
-build:
- entry_points:
- - chardetect = chardet.chardetect:main
- number: 0
-
-requirements:
- build:
- - python
- - setuptools
-
- run:
- - python
-
-test:
- # Python imports
- imports:
- - chardet
-
- commands:
- - chardetect run_test.py
-
-about:
- home: https://github.com/chardet/chardet
- license: GNU Library or Lesser General Public License (LGPL)
diff --git a/conda/dateutil/bld.bat b/conda/dateutil/bld.bat
deleted file mode 100644
index 39b5e1fee..000000000
--- a/conda/dateutil/bld.bat
+++ /dev/null
@@ -1 +0,0 @@
-%PYTHON% setup.py install
diff --git a/conda/dateutil/build.sh b/conda/dateutil/build.sh
deleted file mode 100644
index 5a5aeeb48..000000000
--- a/conda/dateutil/build.sh
+++ /dev/null
@@ -1 +0,0 @@
-$PYTHON setup.py install
diff --git a/conda/dateutil/meta.yaml b/conda/dateutil/meta.yaml
deleted file mode 100644
index fd9d40a3e..000000000
--- a/conda/dateutil/meta.yaml
+++ /dev/null
@@ -1,30 +0,0 @@
-package:
- name: dateutil
- version: 2.4.2
-
-source:
- fn: python-dateutil-2.4.2.tar.gz
- url: https://pypi.python.org/packages/source/p/python-dateutil/python-dateutil-2.4.2.tar.gz
- md5: 4ef68e1c485b09e9f034e10473e5add2
-
-build:
- number: 0
-
-requirements:
- build:
- - python
- - setuptools
- - six >=1.5
- run:
- - python
- - six >=1.5
-
-test:
- imports:
- - dateutil
- - dateutil.zoneinfo
-
-about:
- home: https://dateutil.readthedocs.org
- license: BSD
- summary: 'Extensions to the standard Python datetime module'
diff --git a/conda/flterm/build.sh b/conda/flterm/build.sh
deleted file mode 100644
index 1121beb65..000000000
--- a/conda/flterm/build.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/bash
-
-make -C $SRC_DIR/tools flterm
-mkdir -p $PREFIX/bin
-cp $SRC_DIR/tools/flterm $PREFIX/bin/
diff --git a/conda/flterm/meta.yaml b/conda/flterm/meta.yaml
deleted file mode 100644
index 16afb47cb..000000000
--- a/conda/flterm/meta.yaml
+++ /dev/null
@@ -1,12 +0,0 @@
-package:
- name: flterm
- version: 0
-
-source:
- git_url: https://github.com/m-labs/misoc
- git_tag: master
-
-about:
- home: https://github.com/m-labs/misoc/blob/master/tools/flterm.c
- license: 3-clause BSD
- summary: 'Serial terminal to connect to MiSoC uart.'
diff --git a/conda/libgit2/bld.bat b/conda/libgit2/bld.bat
deleted file mode 100644
index 268c18cd9..000000000
--- a/conda/libgit2/bld.bat
+++ /dev/null
@@ -1,20 +0,0 @@
-mkdir build
-cd build
-REM Configure step
-if "%ARCH%"=="32" (
-set CMAKE_GENERATOR=Visual Studio 12 2013
-) else (
-set CMAKE_GENERATOR=Visual Studio 12 2013 Win64
-)
-set CMAKE_GENERATOR_TOOLSET=v120_xp
-cmake -G "%CMAKE_GENERATOR%" -DCMAKE_INSTALL_PREFIX=%PREFIX% -DSTDCALL=OFF -DCMAKE_PREFIX_PATH=$PREFIX %SRC_DIR%
-if errorlevel 1 exit 1
-REM Build step
-cmake --build .
-if errorlevel 1 exit 1
-REM Install step
-cmake --build . --target install
-if errorlevel 1 exit 1
-REM Hack to help pygit2 to find libgit2
-mkdir %PREFIX%\Scripts
-copy "%PREFIX%\bin\git2.dll" "%PREFIX%\Scripts\"
\ No newline at end of file
diff --git a/conda/libgit2/build.sh b/conda/libgit2/build.sh
deleted file mode 100644
index dc4a85aa0..000000000
--- a/conda/libgit2/build.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash
-
-mkdir build
-cd build
-cmake .. -DCMAKE_INSTALL_PREFIX=$PREFIX -DCMAKE_PREFIX_PATH=$PREFIX
-make -j2
-make install
diff --git a/conda/libgit2/meta.yaml b/conda/libgit2/meta.yaml
deleted file mode 100644
index 5741b44b4..000000000
--- a/conda/libgit2/meta.yaml
+++ /dev/null
@@ -1,27 +0,0 @@
-package:
- name: libgit2
- version: 0.22.3
-
-source:
- git_url: https://github.com/libgit2/libgit2
- git_tag: v0.22.3
-
-build:
- number: 1
-
-requirements:
- build:
- - system # [linux]
- - cmake # [linux]
- - openssl
- - libssh2
- - zlib
- run:
- - openssl
- - zlib
- - libssh2
-
-about:
- home: https://libgit2.github.com/
- license: GPLv2 with a special Linking Exception
- summary: 'libgit2 is a portable, pure C implementation of the Git core methods provided as a re-entrant linkable library with a solid API, allowing you to write native speed custom Git applications in any language with bindings.'
diff --git a/conda/libssh2/bld.bat b/conda/libssh2/bld.bat
deleted file mode 100644
index ed957bd42..000000000
--- a/conda/libssh2/bld.bat
+++ /dev/null
@@ -1,17 +0,0 @@
-mkdir build
-cd build
-REM Configure step
-if "%ARCH%"=="32" (
-set CMAKE_GENERATOR=Visual Studio 12 2013
-) else (
-set CMAKE_GENERATOR=Visual Studio 12 2013 Win64
-)
-set CMAKE_GENERATOR_TOOLSET=v120_xp
-cmake -G "%CMAKE_GENERATOR%" -DCMAKE_INSTALL_PREFIX=%PREFIX% -DOPENSSL_ROOT_DIR=%PREFIX%\Library -DBUILD_SHARED_LIBS=ON -DBUILD_TESTING=OFF -DBUILD_EXAMPLES=OFF -DCMAKE_PREFIX_PATH=$PREFIX %SRC_DIR%
-if errorlevel 1 exit 1
-REM Build step
-cmake --build .
-if errorlevel 1 exit 1
-REM Install step
-cmake --build . --target install
-if errorlevel 1 exit 1
\ No newline at end of file
diff --git a/conda/libssh2/build.sh b/conda/libssh2/build.sh
deleted file mode 100644
index 773dda78b..000000000
--- a/conda/libssh2/build.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash
-
-mkdir build
-cd build
-cmake .. -DCMAKE_INSTALL_PREFIX=$PREFIX -DOPENSSL_ROOT_DIR=$PREFIX -DBUILD_SHARED_LIBS=ON -DBUILD_TESTING=OFF -DBUILD_EXAMPLES=OFF -DCMAKE_PREFIX_PATH=$PREFIX
-make -j2
-make install
diff --git a/conda/libssh2/meta.yaml b/conda/libssh2/meta.yaml
deleted file mode 100644
index 28c0f59b6..000000000
--- a/conda/libssh2/meta.yaml
+++ /dev/null
@@ -1,23 +0,0 @@
-package:
- name: libssh2
- version: 1.6.0
-
-source:
- git_url: https://github.com/libssh2/libssh2
- git_tag: libssh2-1.6.0
-
-build:
- number: 1
-
-requirements:
- build:
- - system # [linux]
- - cmake # [linux]
- - openssl
- run:
- - openssl
-
-about:
- home: http://www.libssh2.org/
- license: BSD
- summary: 'libssh2 is a client-side C library implementing the SSH2 protocol'
diff --git a/conda/lit/bld.bat b/conda/lit/bld.bat
deleted file mode 100644
index c40a9bbef..000000000
--- a/conda/lit/bld.bat
+++ /dev/null
@@ -1,2 +0,0 @@
-"%PYTHON%" setup.py install
-if errorlevel 1 exit 1
diff --git a/conda/lit/build.sh b/conda/lit/build.sh
deleted file mode 100644
index 5a5aeeb48..000000000
--- a/conda/lit/build.sh
+++ /dev/null
@@ -1 +0,0 @@
-$PYTHON setup.py install
diff --git a/conda/lit/meta.yaml b/conda/lit/meta.yaml
deleted file mode 100644
index 14cf41555..000000000
--- a/conda/lit/meta.yaml
+++ /dev/null
@@ -1,27 +0,0 @@
-package:
- name: lit
- version: 0.4.1
-
-source:
- fn: lit-0.4.1.tar.gz
- url: https://pypi.python.org/packages/source/l/lit/lit-0.4.1.tar.gz
- md5: ea6f00470e1bf7ed9e4edcff0f650fe6
-
-build:
- number: 0
-
-requirements:
- build:
- - python
- - setuptools
-
- run:
- - python
-
-test:
- commands:
- - lit --version
-
-about:
- home: http://llvm.org/docs/CommandGuide/lit.html
- license: MIT
diff --git a/conda/llvmdev-or1k/bld.bat b/conda/llvmdev-or1k/bld.bat
deleted file mode 100644
index 654b44d64..000000000
--- a/conda/llvmdev-or1k/bld.bat
+++ /dev/null
@@ -1,25 +0,0 @@
-mkdir build
-cd build
-set BUILD_CONFIG=Release
-REM Configure step
-if "%ARCH%"=="32" (
-set CMAKE_GENERATOR=Visual Studio 12 2013
-) else (
-set CMAKE_GENERATOR=Visual Studio 12 2013 Win64
-)
-set CMAKE_GENERATOR_TOOLSET=v120_xp
-@rem Reduce build times and package size by removing unused stuff
-set CMAKE_CUSTOM=-DLLVM_TARGETS_TO_BUILD="OR1K;X86" -DLLVM_INCLUDE_TESTS=OFF ^
--DLLVM_INCLUDE_TOOLS=OFF -DLLVM_INCLUDE_UTILS=OFF ^
--DLLVM_INCLUDE_DOCS=OFF -DLLVM_INCLUDE_EXAMPLES=OFF ^
--DLLVM_ENABLE_ASSERTIONS=ON
-cmake -G "%CMAKE_GENERATOR%" -T "%CMAKE_GENERATOR_TOOLSET%" ^
--DCMAKE_BUILD_TYPE="%BUILD_CONFIG%" -DCMAKE_PREFIX_PATH=%LIBRARY_PREFIX% ^
--DCMAKE_INSTALL_PREFIX:PATH=%LIBRARY_PREFIX% %CMAKE_CUSTOM% %SRC_DIR%
-if errorlevel 1 exit 1
-REM Build step
-cmake --build . --config "%BUILD_CONFIG%"
-if errorlevel 1 exit 1
-REM Install step
-cmake --build . --config "%BUILD_CONFIG%" --target install
-if errorlevel 1 exit 1
diff --git a/conda/llvmdev-or1k/build.sh b/conda/llvmdev-or1k/build.sh
deleted file mode 100644
index 391f592cc..000000000
--- a/conda/llvmdev-or1k/build.sh
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/bash
-
-cd tools
-git clone https://github.com/openrisc/clang-or1k clang
-cd ..
-mkdir build
-cd build
-cmake .. -DCMAKE_INSTALL_PREFIX=$PREFIX -DLLVM_TARGETS_TO_BUILD="OR1K;X86" -DCMAKE_BUILD_TYPE=Rel -DLLVM_ENABLE_ASSERTIONS=ON
-make -j2
-make install
diff --git a/conda/llvmdev-or1k/meta.yaml b/conda/llvmdev-or1k/meta.yaml
deleted file mode 100644
index 09ca7046a..000000000
--- a/conda/llvmdev-or1k/meta.yaml
+++ /dev/null
@@ -1,22 +0,0 @@
-package:
- name: llvmdev-or1k
- version: "3.5.0"
-
-source:
- git_url: https://github.com/openrisc/llvm-or1k
- git_tag: master
-
-build:
- number: 5
-
-requirements:
- build:
- - system [linux]
- - cmake [linux]
- run:
- - system [linux]
-
-about:
- home: http://llvm.org/
- license: Open Source (http://llvm.org/releases/3.5.0/LICENSE.TXT)
- summary: Development headers and libraries for LLVM
diff --git a/conda/llvmlite-artiq/bld.bat b/conda/llvmlite-artiq/bld.bat
deleted file mode 100644
index 8b58512c1..000000000
--- a/conda/llvmlite-artiq/bld.bat
+++ /dev/null
@@ -1,8 +0,0 @@
-@rem Let CMake know about the LLVM install path, for find_package()
-set CMAKE_PREFIX_PATH=%LIBRARY_PREFIX%
-
-@rem Ensure there are no build leftovers (CMake can complain)
-if exist ffi\build rmdir /S /Q ffi\build
-
-%PYTHON% setup.py install
-if errorlevel 1 exit 1
diff --git a/conda/llvmlite-artiq/build.sh b/conda/llvmlite-artiq/build.sh
deleted file mode 100755
index 8e25a1455..000000000
--- a/conda/llvmlite-artiq/build.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-$PYTHON setup.py install
diff --git a/conda/llvmlite-artiq/meta.yaml b/conda/llvmlite-artiq/meta.yaml
deleted file mode 100644
index 45c964b0f..000000000
--- a/conda/llvmlite-artiq/meta.yaml
+++ /dev/null
@@ -1,27 +0,0 @@
-package:
- name: llvmlite-artiq
- version: "0.5.1"
-
-source:
- git_url: https://github.com/m-labs/llvmlite
- git_tag: artiq
-
-requirements:
- build:
- - python
- - llvmdev-or1k
- - setuptools
- run:
- - python
-
-build:
- number: 5
-
-test:
- imports:
- - llvmlite_artiq
- - llvmlite_artiq.binding
-
-about:
- home: https://pypi.python.org/pypi/llvmlite/
- license: BSD
diff --git a/conda/prettytable/bld.bat b/conda/prettytable/bld.bat
deleted file mode 100644
index 87b1481d7..000000000
--- a/conda/prettytable/bld.bat
+++ /dev/null
@@ -1,8 +0,0 @@
-"%PYTHON%" setup.py install
-if errorlevel 1 exit 1
-
-:: Add more build steps here, if they are necessary.
-
-:: See
-:: http://docs.continuum.io/conda/build.html
-:: for a list of environment variables that are set during the build process.
diff --git a/conda/prettytable/build.sh b/conda/prettytable/build.sh
deleted file mode 100644
index 8e25a1455..000000000
--- a/conda/prettytable/build.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-$PYTHON setup.py install
diff --git a/conda/prettytable/meta.yaml b/conda/prettytable/meta.yaml
deleted file mode 100644
index ca359db92..000000000
--- a/conda/prettytable/meta.yaml
+++ /dev/null
@@ -1,26 +0,0 @@
-package:
- name: prettytable
- version: !!str 0.7.2
-
-source:
- fn: prettytable-0.7.2.tar.bz2
- url: https://pypi.python.org/packages/source/P/PrettyTable/prettytable-0.7.2.tar.bz2
- md5: 760dc900590ac3c46736167e09fa463a
-
-requirements:
- build:
- - python
- - setuptools
-
- run:
- - python
-
-test:
- imports:
- - prettytable
-
-
-about:
- home: http://code.google.com/p/prettytable/
- license: BSD License
- summary: 'A simple Python library for easily displaying tabular data in a visually appealing ASCII table format.'
diff --git a/conda/pydaqmx/bld.bat b/conda/pydaqmx/bld.bat
deleted file mode 100644
index add2c3c60..000000000
--- a/conda/pydaqmx/bld.bat
+++ /dev/null
@@ -1,2 +0,0 @@
-"%PYTHON%" setup.py build
-"%PYTHON%" setup.py install
diff --git a/conda/pydaqmx/build.sh b/conda/pydaqmx/build.sh
deleted file mode 100644
index f1d91245e..000000000
--- a/conda/pydaqmx/build.sh
+++ /dev/null
@@ -1,2 +0,0 @@
-$PYTHON setup.py build
-$PYTHON setup.py install
diff --git a/conda/pydaqmx/meta.yaml b/conda/pydaqmx/meta.yaml
deleted file mode 100644
index 36f4636f9..000000000
--- a/conda/pydaqmx/meta.yaml
+++ /dev/null
@@ -1,22 +0,0 @@
-package:
- name: pydaqmx
- version: "1.3.1"
-
-source:
- git_url: https://github.com/clade/pydaqmx
- git_tag: master
-
-build:
- number: 0
-
-requirements:
- build:
- - python
- - setuptools
- run:
- - python
-
-about:
- home: http://pythonhosted.org/PyDAQmx/
- license: BSD
- summary: PyDAQmx allows users to use data acquisition hardware from National Instruments with Python. It provides an interface between the NIDAQmx driver and Python. The package works on Windows and Linux.'
diff --git a/conda/pyelftools/bld.bat b/conda/pyelftools/bld.bat
deleted file mode 100644
index 39b5e1fee..000000000
--- a/conda/pyelftools/bld.bat
+++ /dev/null
@@ -1 +0,0 @@
-%PYTHON% setup.py install
diff --git a/conda/pyelftools/build.sh b/conda/pyelftools/build.sh
deleted file mode 100644
index 5a5aeeb48..000000000
--- a/conda/pyelftools/build.sh
+++ /dev/null
@@ -1 +0,0 @@
-$PYTHON setup.py install
diff --git a/conda/pyelftools/meta.yaml b/conda/pyelftools/meta.yaml
deleted file mode 100644
index f65b271dd..000000000
--- a/conda/pyelftools/meta.yaml
+++ /dev/null
@@ -1,26 +0,0 @@
-package:
- name: pyelftools
- version: 0.23
-
-source:
- git_url: https://github.com/eliben/pyelftools.git
- git_tag: v0.23
-
-build:
- number: 0
-
-requirements:
- build:
- - python
- - setuptools
- run:
- - python
-
-test:
- imports:
- - elftools
-
-about:
- home: https://github.com/eliben/pyelftools.git
- license: Public domain
- summary: 'Library for analyzing ELF files and DWARF debugging information'
diff --git a/conda/pygit2/bld.bat b/conda/pygit2/bld.bat
deleted file mode 100644
index 0b9010888..000000000
--- a/conda/pygit2/bld.bat
+++ /dev/null
@@ -1,3 +0,0 @@
-set LIBGIT2=%PREFIX%
-set VS100COMNTOOLS=%VS120COMNTOOLS%
-%PYTHON% setup.py install
\ No newline at end of file
diff --git a/conda/pygit2/build.sh b/conda/pygit2/build.sh
deleted file mode 100644
index 833768d01..000000000
--- a/conda/pygit2/build.sh
+++ /dev/null
@@ -1,2 +0,0 @@
-export LIBGIT2=$PREFIX
-$PYTHON setup.py install
diff --git a/conda/pygit2/meta.yaml b/conda/pygit2/meta.yaml
deleted file mode 100644
index fcc222f29..000000000
--- a/conda/pygit2/meta.yaml
+++ /dev/null
@@ -1,28 +0,0 @@
-package:
- name: pygit2
- version: 0.22.1
-
-source:
- git_url: https://github.com/libgit2/pygit2
- git_tag: v0.22.1
-
-build:
- number: 1
-
-requirements:
- build:
- - system # [linux]
- - python
- - libgit2
- - cffi >=0.8.1
- - pkgconfig # [linux]
- run:
- - system # [linux]
- - python
- - libgit2
- - cffi >=0.8.1
-
-about:
- home: http://www.pygit2.org/
- license: GPLv2 with a special Linking Exception
- summary: 'Pygit2 is a set of Python bindings to the libgit2 shared library, libgit2 implements the core of Git.'
diff --git a/conda/pyqtgraph/bld.bat b/conda/pyqtgraph/bld.bat
deleted file mode 100644
index c40a9bbef..000000000
--- a/conda/pyqtgraph/bld.bat
+++ /dev/null
@@ -1,2 +0,0 @@
-"%PYTHON%" setup.py install
-if errorlevel 1 exit 1
diff --git a/conda/pyqtgraph/build.sh b/conda/pyqtgraph/build.sh
deleted file mode 100644
index 8e25a1455..000000000
--- a/conda/pyqtgraph/build.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-$PYTHON setup.py install
diff --git a/conda/pyqtgraph/meta.yaml b/conda/pyqtgraph/meta.yaml
deleted file mode 100644
index f060acf0b..000000000
--- a/conda/pyqtgraph/meta.yaml
+++ /dev/null
@@ -1,27 +0,0 @@
-package:
- name: pyqtgraph
- version: 0.9.10.1036edf
-
-source:
- git_url: https://github.com/pyqtgraph/pyqtgraph.git
- git_rev: 1036edf
-
-requirements:
- build:
- - python
- - setuptools
- - numpy
-
- run:
- - python
- - numpy
- - pyqt >=4.7
-
-test:
- imports:
- - pyqtgraph
-
-about:
- home: http://www.pyqtgraph.org
- license: MIT License
- summary: 'Scientific Graphics and GUI Library for Python'
diff --git a/conda/pythonparser/bld.bat b/conda/pythonparser/bld.bat
deleted file mode 100644
index c8c1ee0d1..000000000
--- a/conda/pythonparser/bld.bat
+++ /dev/null
@@ -1,2 +0,0 @@
-pip install regex
-%PYTHON% setup.py install
diff --git a/conda/pythonparser/build.sh b/conda/pythonparser/build.sh
deleted file mode 100644
index 1e07e90fb..000000000
--- a/conda/pythonparser/build.sh
+++ /dev/null
@@ -1,2 +0,0 @@
-pip install regex
-$PYTHON setup.py install
diff --git a/conda/pythonparser/meta.yaml b/conda/pythonparser/meta.yaml
deleted file mode 100644
index 6ef508192..000000000
--- a/conda/pythonparser/meta.yaml
+++ /dev/null
@@ -1,24 +0,0 @@
-package:
- name: pythonparser
- version: 0.0
-
-source:
- git_url: https://github.com/m-labs/pythonparser
- git_tag: master
-
-build:
- number: 0
-
-requirements:
- build:
- - python
- - setuptools
-
-test:
- imports:
- - pythonparser
-
-about:
- home: http://m-labs.hk/pythonparser/
- license: BSD
- summary: 'PythonParser is a Python parser written specifically for use in tooling. It parses source code into an AST that is a superset of Python’s built-in ast module, but returns precise location information for every token.'
diff --git a/conda/quamash/bld.bat b/conda/quamash/bld.bat
deleted file mode 100644
index 39b5e1fee..000000000
--- a/conda/quamash/bld.bat
+++ /dev/null
@@ -1 +0,0 @@
-%PYTHON% setup.py install
diff --git a/conda/quamash/build.sh b/conda/quamash/build.sh
deleted file mode 100644
index 8e25a1455..000000000
--- a/conda/quamash/build.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-$PYTHON setup.py install
diff --git a/conda/quamash/meta.yaml b/conda/quamash/meta.yaml
deleted file mode 100644
index 724e9b674..000000000
--- a/conda/quamash/meta.yaml
+++ /dev/null
@@ -1,29 +0,0 @@
-package:
- name: quamash
- version: 0.5.1
-
-source:
- fn: Quamash-0.5.1.tar.gz
- url: https://pypi.python.org/packages/source/Q/Quamash/Quamash-0.5.1.tar.gz#md5=c5fa317f615eafd492560771bc2caeca
- md5: c5fa317f615eafd492560771bc2caeca
-
-build:
- number: 0
-
-requirements:
- build:
- - python
- - setuptools
- - pyqt 4.*
- run:
- - python
- - pyqt 4.*
-
-test:
- imports:
- - quamash
-
-about:
- home: https://github.com/harvimt/quamash
- license: BSD
- summary: 'Implementation of the PEP 3156 Event-Loop with Qt'
diff --git a/conda/sphinx-argparse/bld.bat b/conda/sphinx-argparse/bld.bat
deleted file mode 100644
index 39b5e1fee..000000000
--- a/conda/sphinx-argparse/bld.bat
+++ /dev/null
@@ -1 +0,0 @@
-%PYTHON% setup.py install
diff --git a/conda/sphinx-argparse/build.sh b/conda/sphinx-argparse/build.sh
deleted file mode 100644
index 5a5aeeb48..000000000
--- a/conda/sphinx-argparse/build.sh
+++ /dev/null
@@ -1 +0,0 @@
-$PYTHON setup.py install
diff --git a/conda/sphinx-argparse/meta.yaml b/conda/sphinx-argparse/meta.yaml
deleted file mode 100644
index 6ead92292..000000000
--- a/conda/sphinx-argparse/meta.yaml
+++ /dev/null
@@ -1,28 +0,0 @@
-package:
- name: sphinx-argparse
- version: 0.1.13
-
-source:
- fn: sphinx-argparse-0.1.13.tar.gz
- url: https://pypi.python.org/packages/source/s/sphinx-argparse/sphinx-argparse-0.1.13.tar.gz
- md5: 5ec84e75e1c4b2ae7ca5fb92a6abd738
-
-build:
- number: 0
-
-requirements:
- build:
- - python
- - setuptools
- - sphinx
- run:
- - python
- - sphinx
-
-test:
- imports:
- - sphinxarg
-
-about:
- license: MIT
- summary: 'Sphinx extension that automatically documents argparse commands and options'
diff --git a/doc/manual/default_network_ports.rst b/doc/manual/default_network_ports.rst
index 35e576d2b..ab50291af 100644
--- a/doc/manual/default_network_ports.rst
+++ b/doc/manual/default_network_ports.rst
@@ -8,6 +8,8 @@ Default network ports
+--------------------------+--------------+
| Core device (mon/inj) | 3250 (UDP) |
+--------------------------+--------------+
+| Master (logging) | 1066 |
++--------------------------+--------------+
| InfluxDB bridge | 3248 |
+--------------------------+--------------+
| Controller manager | 3249 |
diff --git a/doc/manual/getting_started_core.rst b/doc/manual/getting_started_core.rst
index a9e78518c..1f42d152d 100644
--- a/doc/manual/getting_started_core.rst
+++ b/doc/manual/getting_started_core.rst
@@ -22,7 +22,7 @@ As a very first step, we will turn on a LED on the core device. Create a file ``
The central part of our code is our ``LED`` class, that derives from :class:`artiq.language.environment.EnvExperiment`. Among other features, ``EnvExperiment`` calls our ``build`` method and provides the ``setattr_device`` method that interfaces to the device database to create the appropriate device drivers and make those drivers accessible as ``self.core`` and ``self.led``. The ``@kernel`` decorator tells the system that the ``run`` method must be executed on the core device (instead of the host). The decorator uses ``self.core`` internally, which is why we request the core device using ``setattr_device`` like any other.
-Copy the files ``device_db.pyon`` and ``dataset_db.pyon`` (containing the device and dataset databases) from the ``examples/master`` folder of ARTIQ into the same directory as ``led.py`` (alternatively, you can use the ``--device-db`` and ``--dataset-db`` options of ``artiq_run``). You can open the database files using a text editor - their contents are in a human-readable format. You will probably want to set the IP address of the core device in ``device_db.pyon`` so that the computer can connect to it (it is the ``host`` parameter of the ``comm`` entry). See :ref:`device-db` for more information. The example device database is designed for the NIST QC1 hardware on the KC705; see :ref:`board-ports` for RTIO channel assignments if you need to adapt the device database to a different hardware platform.
+Copy the file ``device_db.pyon`` (containing the device database) from the ``examples/master`` folder of ARTIQ into the same directory as ``led.py`` (alternatively, you can use the ``--device-db`` option of ``artiq_run``). You can open PYON database files using a text editor - their contents are in a human-readable format. You will probably want to set the IP address of the core device in ``device_db.pyon`` so that the computer can connect to it (it is the ``host`` parameter of the ``comm`` entry). See :ref:`device-db` for more information. The example device database is designed for the NIST QC1 hardware on the KC705; see :ref:`board-ports` for RTIO channel assignments if you need to adapt the device database to a different hardware platform.
.. note::
If the ``led`` device is a bidirectional TTL (i.e. ``TTLInOut`` instead of ``TTLOut``), you need to put it in output (driving) mode. Add the following at the beginning of ``run``: ::
diff --git a/doc/manual/getting_started_mgmt.rst b/doc/manual/getting_started_mgmt.rst
index 9ddf82f1f..c030ad69f 100644
--- a/doc/manual/getting_started_mgmt.rst
+++ b/doc/manual/getting_started_mgmt.rst
@@ -10,7 +10,7 @@ Starting your first experiment with the master
In the previous tutorial, we used the ``artiq_run`` utility to execute our experiments, which is a simple stand-alone tool that bypasses the ARTIQ management system. We will now see how to run an experiment using the master (the central program in the management system that schedules and executes experiments) and the GUI client (that connects to the master and controls it).
-First, create a folder ``~/artiq-master`` and copy the ``device_db.pyon`` and ``dataset_db.pyon`` (containing the device and dataset databases) found in the ``examples/master`` directory from the ARTIQ sources. The master uses those files in the same way as ``artiq_run``.
+First, create a folder ``~/artiq-master`` and copy the file ``device_db.pyon`` (containing the device database) found in the ``examples/master`` directory from the ARTIQ sources. The master uses those files in the same way as ``artiq_run``.
Then create a ``~/artiq-master/repository`` sub-folder to contain experiments. The master scans this ``repository`` folder to determine what experiments are available (the name of the folder can be changed using ``-r``).
diff --git a/doc/manual/installing.rst b/doc/manual/installing.rst
index 8cb1727a2..263f5fceb 100644
--- a/doc/manual/installing.rst
+++ b/doc/manual/installing.rst
@@ -13,12 +13,12 @@ Installing using conda
Installing Anaconda or Miniconda
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-* You can either install Anaconda (chose Python 3.5) from https://store.continuum.io/cshop/anaconda/
+* You can either install Anaconda (choose Python 3.5) from https://store.continuum.io/cshop/anaconda/
-* Or install the more minimalistic Miniconda (chose Python 3.5) from http://conda.pydata.org/miniconda.html
+* Or install the more minimalistic Miniconda (choose Python 3.5) from http://conda.pydata.org/miniconda.html
.. warning::
- If you are installing on Windows, chose the Windows 32-bit version regardless of whether you have
+ If you are installing on Windows, choose the Windows 32-bit version regardless of whether you have
a 32-bit or 64-bit Windows.
After installing either Anaconda or Miniconda, open a new terminal and make sure the following command works::
@@ -29,7 +29,7 @@ If it prints the help of the ``conda`` command, your install is OK.
If not, then make sure your ``$PATH`` environment variable contains the path to anaconda3/bin (or miniconda3/bin)::
$ echo $PATH
- /home/..../miniconda3/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games
+ /home/.../miniconda3/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin
If your ``$PATH`` misses reference the miniconda3/bin or anaconda3/bin you can fix this by typing::
@@ -38,13 +38,37 @@ If your ``$PATH`` misses reference the miniconda3/bin or anaconda3/bin you can f
Installing the host side software
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-For this, you need to add our binstar repository to your conda configuration::
+For this, you need to add our Anaconda repository to your conda configuration::
+ $ conda config --add channels http://conda.anaconda.org/m-labs/channel/main
$ conda config --add channels http://conda.anaconda.org/m-labs/channel/dev
-Then you can install the ARTIQ package, it will pull all the necessary dependencies::
+Then you can install the ARTIQ package, it will pull all the necessary dependencies.
- $ conda install artiq
+* For the Pipistrello board::
+
+ $ ENV=$(date +artiq-%Y-%m-%d); conda create -n $ENV artiq-pipistrello-nist_qc1; \
+ echo "Created environment $ENV for ARTIQ"
+
+* For the KC705 board::
+
+ $ ENV=$(date +artiq-%Y-%m-%d); conda create -n $ENV artiq-kc705-nist_qc1 artiq-kc705-nist_qc2; \
+ echo "Created environment $ENV for ARTIQ"
+
+This creates a new Conda "environment" (i.e. an isolated installation) and prints its name.
+If you ever need to upgrade ARTIQ, it is advised to install it again
+in a new environment so that you can roll back to a version that is known to
+work correctly.
+
+After this, add the newly created environment to your ``$PATH``. This can be easily
+done using the following command::
+
+ $ source activate artiq-[date]
+
+You will need to invoke this command in every new shell. When in doubt, you can list
+the existing environments using::
+
+ $ conda env list
Preparing the core device FPGA board
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/examples/master/dataset_db.pyon b/examples/master/dataset_db.pyon
deleted file mode 100644
index 00f15d739..000000000
--- a/examples/master/dataset_db.pyon
+++ /dev/null
@@ -1 +0,0 @@
-{"flopping_freq": 1499.9876804260716}
diff --git a/examples/master/device_db.pyon b/examples/master/device_db.pyon
index cafe5e538..f1502abb4 100644
--- a/examples/master/device_db.pyon
+++ b/examples/master/device_db.pyon
@@ -92,34 +92,29 @@
"qc_q1_0": {
"type": "controller",
- "best_effort": false,
+ # ::1 is the IPv6 localhost address. If this controller is running on a remote machine,
+ # replace it with the IP or hostname of the machine. If using the hostname, make sure
+ # that it always resolves to a network-visible IP address (see documentation).
"host": "::1",
"port": 4000,
- "target_name": "pdq2",
"command": "pdq2_controller -p {port} --bind {bind} --simulation --dump qc_q1_0.bin"
},
"qc_q1_1": {
"type": "controller",
- "best_effort": false,
"host": "::1",
"port": 4001,
- "target_name": "pdq2",
"command": "pdq2_controller -p {port} --bind {bind} --simulation --dump qc_q1_1.bin"
},
"qc_q1_2": {
"type": "controller",
- "best_effort": false,
"host": "::1",
"port": 4002,
- "target_name": "pdq2",
"command": "pdq2_controller -p {port} --bind {bind} --simulation --dump qc_q1_2.bin"
},
"qc_q1_3": {
"type": "controller",
- "best_effort": false,
"host": "::1",
"port": 4003,
- "target_name": "pdq2",
"command": "pdq2_controller -p {port} --bind {bind} --simulation --dump qc_q1_3.bin"
},
"electrodes": {
@@ -138,7 +133,6 @@
"best_effort": true,
"host": "::1",
"port": 3253,
- "target_name": "lda",
"command": "lda_controller -p {port} --bind {bind} --simulation"
},
diff --git a/examples/master/repository/arguments_demo.py b/examples/master/repository/arguments_demo.py
index ffb6e2ac2..3bfcde254 100644
--- a/examples/master/repository/arguments_demo.py
+++ b/examples/master/repository/arguments_demo.py
@@ -6,7 +6,7 @@ from artiq import *
class SubComponent1(HasEnvironment):
def build(self):
self.setattr_argument("sc1_scan", Scannable(default=NoScan(3250),
- scale=1e3, unit="Hz"),
+ scale=1e3, unit="kHz"),
"Flux capacitor")
self.setattr_argument("sc1_enum", EnumerationValue(["1", "2", "3"]),
"Flux capacitor")
@@ -39,7 +39,7 @@ class ArgumentsDemo(EnvExperiment):
def build(self):
self.setattr_argument("free_value", FreeValue(None))
self.setattr_argument("number", NumberValue(42e-6,
- unit="s", scale=1e-6,
+ unit="us", scale=1e-6,
ndecimals=4))
self.setattr_argument("string", StringValue("Hello World"))
self.setattr_argument("scan", Scannable(global_max=400,
@@ -53,7 +53,6 @@ class ArgumentsDemo(EnvExperiment):
self.sc2 = SubComponent2(parent=self)
def run(self):
- logging.basicConfig(level=logging.DEBUG)
logging.error("logging test: error")
logging.warning("logging test: warning")
logging.info("logging test: info")
diff --git a/examples/master/repository/flopping_f_simulation.py b/examples/master/repository/flopping_f_simulation.py
index 3624502c9..a2f0f04c9 100644
--- a/examples/master/repository/flopping_f_simulation.py
+++ b/examples/master/repository/flopping_f_simulation.py
@@ -37,16 +37,20 @@ class FloppingF(EnvExperiment):
self.setattr_device("scheduler")
def run(self):
- frequency = self.set_dataset("flopping_f_frequency", [],
+ l = len(self.frequency_scan)
+ frequency = self.set_dataset("flopping_f_frequency",
+ np.full(l, np.nan),
broadcast=True, save=False)
- brightness = self.set_dataset("flopping_f_brightness", [],
+ brightness = self.set_dataset("flopping_f_brightness",
+ np.full(l, np.nan),
broadcast=True)
- self.set_dataset("flopping_f_fit", [], broadcast=True, save=False)
+ self.set_dataset("flopping_f_fit", np.full(l, np.nan),
+ broadcast=True, save=False)
- for f in self.frequency_scan:
+ for i, f in enumerate(self.frequency_scan):
m_brightness = model(f, self.F0) + self.noise_amplitude*random.random()
- frequency.append(f)
- brightness.append(m_brightness)
+ frequency[i] = f
+ brightness[i] = m_brightness
time.sleep(0.1)
self.scheduler.submit(self.scheduler.pipeline_name, self.scheduler.expid,
self.scheduler.priority, time.time() + 20, False)
@@ -57,11 +61,11 @@ class FloppingF(EnvExperiment):
brightness = self.get_dataset("flopping_f_brightness")
popt, pcov = curve_fit(model_numpy,
frequency, brightness,
- p0=[self.get_dataset("flopping_freq")])
+ p0=[self.get_dataset("flopping_freq", 1500.0)])
perr = np.sqrt(np.diag(pcov))
if perr < 0.1:
F0 = float(popt)
self.set_dataset("flopping_freq", F0, persist=True, save=False)
self.set_dataset("flopping_f_fit",
- [model(x, F0) for x in frequency],
+ np.array([model(x, F0) for x in frequency]),
broadcast=True, save=False)
diff --git a/examples/sim/dataset_db.pyon b/examples/sim/dataset_db.pyon
deleted file mode 100644
index 0967ef424..000000000
--- a/examples/sim/dataset_db.pyon
+++ /dev/null
@@ -1 +0,0 @@
-{}
diff --git a/setup.py b/setup.py
index 9d4bdcae5..c76831ad2 100755
--- a/setup.py
+++ b/setup.py
@@ -57,7 +57,7 @@ setup(
url="http://m-labs.hk/artiq",
description="A control system for trapped-ion experiments",
long_description=open("README.rst").read(),
- license="BSD",
+ license="GPL",
install_requires=requirements,
extras_require={},
dependency_links=[