mirror of https://github.com/m-labs/artiq.git

commit f70f7fb89b: Merge branch 'master' into new-py2llvm
@@ -13,6 +13,9 @@ doc/manual/_build
 /*.egg-info
 /.coverage
 artiq/test/results
+artiq/test/h5types.h5
 examples/master/results
+examples/master/dataset_db.pyon
+examples/sim/dataset_db.pyon
 Output/
 /lit-test/libartiq_support/libartiq_support.so

.travis.yml (47 lines changed)
@@ -9,29 +9,40 @@ env:
   global:
     - secure: "DUk/Ihg8KbbzEgPF0qrHqlxU8e8eET9i/BtzNvFddIGX4HP/P2qz0nk3cVkmjuWhqJXSbC22RdKME9qqPzw6fJwJ6dpJ3OR6dDmSd7rewavq+niwxu52PVa+yK8mL4yf1terM7QQ5tIRf+yUL9qGKrZ2xyvEuRit6d4cFep43Ws="
   matrix:
-    - BUILD_SOC=0
-    - BUILD_SOC=1
-before_install:
-  - mkdir -p $HOME/.mlabs
-  - if [ $TRAVIS_PULL_REQUEST != false ]; then BUILD_SOC=0; fi
-  - if [ $BUILD_SOC -ne 0 ]; then ./.travis/get-xilinx.sh; fi
-  - . ./.travis/get-toolchain.sh
-  - . ./.travis/get-anaconda.sh
-  - echo "BUILD_SOC=$BUILD_SOC" >> $HOME/.mlabs/build_settings.sh
-  - source $HOME/miniconda/bin/activate py35
-  - conda install -q pip coverage anaconda-client migen cython
-  - pip install coveralls
+    - BUILD_SOC=none
+    - BUILD_SOC=pipistrello-nist_qc1
+    - BUILD_SOC=kc705-nist_qc1
+    - BUILD_SOC=kc705-nist_qc2
 install:
-  - conda build --python 3.5 conda/artiq
-  - conda install -q artiq --use-local
+  - mkdir -p $HOME/.m-labs
+  - if [ $TRAVIS_PULL_REQUEST != false ]; then BUILD_SOC=none; fi
+  - if [ $BUILD_SOC != none ]; then ./.travis/get-xilinx.sh; fi
+  - if [ $BUILD_SOC != none ]; then ./.travis/get-toolchain.sh; fi
+  - if [ $BUILD_SOC != none ]; then ./.travis/get-misoc.sh; fi
+  - . ./.travis/get-anaconda.sh
+  - source $HOME/miniconda/bin/activate py35
+  - conda install -q pip coverage anaconda-client cython
+  - pip install coveralls
+  # workaround for https://github.com/conda/conda-build/issues/466
+  - mkdir -p /home/travis/miniconda/conda-bld/linux-64
+  - conda index /home/travis/miniconda/conda-bld/linux-64
 script:
-  - coverage run --source=artiq setup.py test
-  - make -C doc/manual html
+  - conda build --python 3.5 conda/artiq
+  - conda install -q --use-local artiq
+  - |
+    if [ $BUILD_SOC == none ]; then
+      PACKAGES="$(conda build --output --python 3.5 conda/artiq) $PACKAGES"
+      coverage run --source=artiq setup.py test
+      make -C doc/manual html
+    else
+      PACKAGES="$(conda build --output --python 3.5 conda/artiq-$BUILD_SOC) $PACKAGES"
+      conda build --python 3.5 conda/artiq-$BUILD_SOC
+    fi
 after_success:
   - |
-    if [ "$TRAVIS_BRANCH" == "master" -a $BUILD_SOC -eq 1 ]; then
+    if [ "$TRAVIS_BRANCH" == "master" -a "$PACKAGES" != "" ]; then
       anaconda -q login --hostname $(hostname) --username $binstar_login --password $binstar_password
-      anaconda -q upload --user $binstar_login --channel dev --force $HOME/miniconda/conda-bld/linux-64/artiq-*.tar.bz2
+      anaconda -q upload --user $binstar_login --channel dev $PACKAGES
       anaconda -q logout
     fi
   - coveralls
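The build matrix now names gateware targets directly instead of using a 0/1 flag, and each job accumulates the paths of the conda packages it built in $PACKAGES. The "conda build --output" invocations print the path a recipe would produce without running the build, so after_success uploads exactly the packages this job created, rather than force-uploading a wildcard from the conda-bld directory as before.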

@@ -10,4 +10,5 @@ conda update -q conda
 conda info -a
 conda install conda-build jinja2
 conda create -q -n py35 python=$TRAVIS_PYTHON_VERSION
+conda config --add channels https://conda.anaconda.org/m-labs/channel/main
 conda config --add channels https://conda.anaconda.org/m-labs/channel/dev

@@ -0,0 +1,4 @@
+#!/bin/sh
+
+git clone --recursive https://github.com/m-labs/misoc $HOME/misoc
+echo "export MSCDIR=$HOME/misoc" >> $HOME/.m-labs/build_settings.sh
@@ -1,7 +1,6 @@
 #!/bin/sh
 
 packages="http://us.archive.ubuntu.com/ubuntu/pool/universe/i/iverilog/iverilog_0.9.7-1_amd64.deb"
-archives="http://fehu.whitequark.org/files/llvm-or1k.tbz2"
 
 mkdir -p packages
 

@@ -12,18 +11,5 @@ do
 dpkg -x $pkg_name packages
 done
 
-for a in $archives
-do
-wget $a
-(cd packages && tar xf ../$(basename $a))
-done
-
-export PATH=$PWD/packages/usr/local/llvm-or1k/bin:$PWD/packages/usr/local/bin:$PWD/packages/usr/bin:$PATH
-export LD_LIBRARY_PATH=$PWD/packages/usr/lib/x86_64-linux-gnu:$PWD/packages/usr/local/x86_64-unknown-linux-gnu/or1k-elf/lib:$LD_LIBRARY_PATH
-
-echo "export LD_LIBRARY_PATH=$PWD/packages/usr/lib/x86_64-linux-gnu:$PWD/packages/usr/local/x86_64-unknown-linux-gnu/or1k-elf/lib:\$LD_LIBRARY_PATH" >> $HOME/.mlabs/build_settings.sh
-echo "export PATH=$PWD/packages/usr/local/llvm-or1k/bin:$PWD/packages/usr/local/bin:$PWD/packages/usr/bin:\$PATH" >> $HOME/.mlabs/build_settings.sh
-
-or1k-linux-as --version
-llc --version
-clang --version
+echo "export LD_LIBRARY_PATH=$PWD/packages/usr/lib/x86_64-linux-gnu" >> $HOME/.m-labs/build_settings.sh
+echo "export PATH=$PWD/packages/usr/bin:\$PATH" >> $HOME/.m-labs/build_settings.sh

@@ -30,7 +30,7 @@ git clone https://github.com/fallen/impersonate_macaddress
 make -C impersonate_macaddress
 # Tell mibuild where Xilinx toolchains are installed
 # and feed it the mac address corresponding to the license
-cat >> $HOME/.mlabs/build_settings.sh << EOF
+cat >> $HOME/.m-labs/build_settings.sh << EOF
 MISOC_EXTRA_VIVADO_CMDLINE="-Ob vivado_path $HOME/Xilinx/Vivado"
 MISOC_EXTRA_ISE_CMDLINE="-Ob ise_path $HOME/opt/Xilinx/"
 export MACADDR=$macaddress

@@ -139,6 +139,12 @@ class Pdq2:
         self.num_channels = self.num_dacs * self.num_boards
         self.channels = [Channel() for i in range(self.num_channels)]
 
+    def get_num_boards(self):
+        return self.num_boards
+
+    def get_num_channels(self):
+        return self.num_channels
+
     def close(self):
         self.dev.close()
         del self.dev
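These accessors mirror attributes already computed in __init__; they exist so that remote clients, which cannot read attributes over RPC, can query the board layout. A hedged sketch of how a client could call them through the pc_rpc Client used elsewhere in this commit (host and port are placeholders, not the actual controller address):

    from artiq.protocols.pc_rpc import Client

    dev = Client("::1", 3252, "pdq2")  # hypothetical controller endpoint
    try:
        print(dev.get_num_boards(), dev.get_num_channels())
    finally:
        dev.close_rpc()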

@@ -1,6 +1,7 @@
 #!/usr/bin/env python3
 
 import argparse
+import logging
 import time
 import asyncio
 import sys

@@ -51,6 +52,10 @@ def get_argparser():
                             "(defaults to head, ignored without -R)")
     parser_add.add_argument("-c", "--class-name", default=None,
                             help="name of the class to run")
+    parser_add.add_argument("-v", "--verbose", default=0, action="count",
+                            help="increase logging level of the experiment")
+    parser_add.add_argument("-q", "--quiet", default=0, action="count",
+                            help="decrease logging level of the experiment")
     parser_add.add_argument("file",
                             help="file containing the experiment to run")
     parser_add.add_argument("arguments", nargs="*",

@@ -110,6 +115,7 @@ def _action_submit(remote, args):
         sys.exit(1)
 
     expid = {
+        "log_level": logging.WARNING + args.quiet*10 - args.verbose*10,
         "file": args.file,
         "class_name": args.class_name,
         "arguments": arguments,

@@ -5,39 +5,22 @@ import atexit
 import argparse
 import os
 import logging
+import subprocess
 import shlex
 import socket
+import platform
 
 from artiq.protocols.sync_struct import Subscriber
 from artiq.protocols.pc_rpc import AsyncioClient, Server
-from artiq.tools import verbosity_args, init_logger
+from artiq.protocols.logging import (LogForwarder,
+                                     parse_log_message, log_with_name,
+                                     SourceFilter)
 from artiq.tools import TaskObject, Condition
 
 
 logger = logging.getLogger(__name__)
 
 
-def get_argparser():
-    parser = argparse.ArgumentParser(description="ARTIQ controller manager")
-    verbosity_args(parser)
-    parser.add_argument(
-        "-s", "--server", default="::1",
-        help="hostname or IP of the master to connect to")
-    parser.add_argument(
-        "--port", default=3250, type=int,
-        help="TCP port to use to connect to the master")
-    parser.add_argument(
-        "--retry-master", default=5.0, type=float,
-        help="retry timer for reconnecting to master")
-    parser.add_argument(
-        "--bind", default="::1",
-        help="hostname or IP address to bind to")
-    parser.add_argument(
-        "--bind-port", default=3249, type=int,
-        help="TCP port to listen to for control (default: %(default)d)")
-    return parser
-
-
 class Controller:
     def __init__(self, name, ddb_entry):
         self.name = name

@@ -96,6 +79,23 @@ class Controller:
         else:
             break
 
+    async def forward_logs(self, stream):
+        source = "controller({})".format(self.name)
+        while True:
+            try:
+                entry = (await stream.readline())
+                if not entry:
+                    break
+                entry = entry[:-1]
+                level, name, message = parse_log_message(entry.decode())
+                log_with_name(name, level, message, extra={"source": source})
+            except:
+                logger.debug("exception in log forwarding", exc_info=True)
+                break
+        logger.debug("stopped log forwarding of stream %s of %s",
+                     stream, self.name)
+
     async def launcher(self):
         try:
             while True:
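forward_logs() consumes a controller's stdout or stderr line by line and re-emits each line into the logging system, tagged with the controller's name as its source. A standalone sketch of the same pipe-and-forward pattern outside ARTIQ (the command and the print-based "forwarding" are placeholders):

    import asyncio, subprocess

    async def forward(stream, source):
        while True:
            line = await stream.readline()
            if not line:
                break  # EOF: the child exited or closed the pipe
            print(source, line.decode().rstrip())

    async def main():
        # spawn a child process with both pipes captured
        proc = await asyncio.create_subprocess_exec(
            "echo", "hello",
            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        # drain both pipes concurrently while waiting for exit
        await asyncio.gather(forward(proc.stdout, "out:"),
                             forward(proc.stderr, "err:"),
                             proc.wait())

    asyncio.get_event_loop().run_until_complete(main())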

@@ -103,7 +103,12 @@
                              self.name, self.command)
                 try:
                     self.process = await asyncio.create_subprocess_exec(
-                        *shlex.split(self.command))
+                        *shlex.split(self.command),
+                        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+                    asyncio.ensure_future(self.forward_logs(
+                        self.process.stdout))
+                    asyncio.ensure_future(self.forward_logs(
+                        self.process.stderr))
                     await self._wait_and_ping()
                 except FileNotFoundError:
                     logger.warning("Controller %s failed to start", self.name)

@@ -129,14 +134,20 @@ class Controller:
             except:
                 logger.warning("Controller %s did not respond to terminate "
                                "command, killing", self.name)
-                self.process.kill()
+                try:
+                    self.process.kill()
+                except ProcessLookupError:
+                    pass
             try:
                 await asyncio.wait_for(self.process.wait(),
                                        self.term_timeout)
             except:
                 logger.warning("Controller %s failed to exit, killing",
                                self.name)
-                self.process.kill()
+                try:
+                    self.process.kill()
+                except ProcessLookupError:
+                    pass
             await self.process.wait()
         logger.debug("Controller %s terminated", self.name)
 
@@ -252,9 +263,48 @@ class ControllerManager(TaskObject):
             self.controller_db.current_controllers.active[k].retry_now.notify()
 
 
+def get_argparser():
+    parser = argparse.ArgumentParser(description="ARTIQ controller manager")
+
+    group = parser.add_argument_group("verbosity")
+    group.add_argument("-v", "--verbose", default=0, action="count",
+                       help="increase logging level of the manager process")
+    group.add_argument("-q", "--quiet", default=0, action="count",
+                       help="decrease logging level of the manager process")
+
+    parser.add_argument(
+        "-s", "--server", default="::1",
+        help="hostname or IP of the master to connect to")
+    parser.add_argument(
+        "--port-notify", default=3250, type=int,
+        help="TCP port to connect to for notifications")
+    parser.add_argument(
+        "--port-logging", default=1066, type=int,
+        help="TCP port to connect to for logging")
+    parser.add_argument(
+        "--retry-master", default=5.0, type=float,
+        help="retry timer for reconnecting to master")
+    parser.add_argument(
+        "--bind", default="::1",
+        help="hostname or IP address to bind to")
+    parser.add_argument(
+        "--bind-port", default=3249, type=int,
+        help="TCP port to listen to for control (default: %(default)d)")
+    return parser
+
+
 def main():
     args = get_argparser().parse_args()
-    init_logger(args)
+
+    root_logger = logging.getLogger()
+    root_logger.setLevel(logging.NOTSET)
+    source_adder = SourceFilter(logging.WARNING + args.quiet*10 - args.verbose*10,
+                                "ctlmgr({})".format(platform.node()))
+    console_handler = logging.StreamHandler()
+    console_handler.setFormatter(logging.Formatter(
+        "%(levelname)s:%(source)s:%(name)s:%(message)s"))
+    console_handler.addFilter(source_adder)
+    root_logger.addHandler(console_handler)
 
     if os.name == "nt":
         loop = asyncio.ProactorEventLoop()

@@ -263,7 +313,15 @@ def main():
         loop = asyncio.get_event_loop()
     atexit.register(lambda: loop.close())
 
-    ctlmgr = ControllerManager(args.server, args.port, args.retry_master)
+    logfwd = LogForwarder(args.server, args.port_logging,
+                          args.retry_master)
+    logfwd.addFilter(source_adder)
+    root_logger.addHandler(logfwd)
+    logfwd.start()
+    atexit.register(lambda: loop.run_until_complete(logfwd.stop()))
+
+    ctlmgr = ControllerManager(args.server, args.port_notify,
+                               args.retry_master)
     ctlmgr.start()
     atexit.register(lambda: loop.run_until_complete(ctlmgr.stop()))
 
@@ -1,5 +1,16 @@
-#!/bin/bash
+#!/usr/bin/env python
+# conda-build requires all scripts to have a python shebang.
+# see https://github.com/conda/conda-build/blob/6921f067a/conda_build/noarch_python.py#L36-L38
+
+def run(script):
+    import sys, tempfile, subprocess
+    file = tempfile.NamedTemporaryFile(mode='w+t', suffix='sh')
+    file.write(script)
+    file.flush()
+    subprocess.run(["/bin/bash", file.name] + sys.argv[1:])
+    file.close()
+
+run("""
 # exit on error
 set -e
 # print commands
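The wrapper keeps the original bash payload intact: run() spills the triple-quoted script into a temporary file, executes it with /bin/bash, and forwards sys.argv[1:] unchanged, so an invocation like "artiq_flash.sh -b -t kc705" behaves exactly as before. This also explains the usage-string change in the next hunk: $0 would now expand to the temporary file's name, hence the hard-coded "artiq_flash.sh". Note that subprocess.run() requires Python 3.5 or later; on older interpreters subprocess.call() would be the drop-in substitute.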
@@ -72,7 +83,7 @@ do
 echo ""
 echo "To flash everything, do not use any of the -b|-B|-r option."
 echo ""
-echo "usage: $0 [-b] [-B] [-r] [-h] [-m nist_qc1|nist_qc2] [-t kc705|pipistrello] [-d path] [-f path]"
+echo "usage: artiq_flash.sh [-b] [-B] [-r] [-h] [-m nist_qc1|nist_qc2] [-t kc705|pipistrello] [-d path] [-f path]"
 echo "-b Flash bitstream"
 echo "-B Flash BIOS"
 echo "-r Flash ARTIQ runtime"

@@ -193,3 +204,4 @@ then
 fi
 echo "Done."
 xc3sprog -v -c $CABLE -R > /dev/null 2>&1
+""")

@@ -116,6 +116,7 @@ def main():
     atexit.register(lambda: loop.run_until_complete(d_schedule.sub_close()))
 
     d_log = LogDock()
+    smgr.register(d_log)
     loop.run_until_complete(d_log.sub_connect(
         args.server, args.port_notify))
     atexit.register(lambda: loop.run_until_complete(d_log.sub_close()))

@@ -5,9 +5,10 @@ import argparse
 import atexit
 import os
 
-from artiq.protocols.pc_rpc import Server
+from artiq.protocols.pc_rpc import Server as RPCServer
 from artiq.protocols.sync_struct import Publisher
-from artiq.master.log import log_args, init_log
+from artiq.protocols.logging import Server as LoggingServer
+from artiq.master.log import log_args, init_log, log_worker
 from artiq.master.databases import DeviceDB, DatasetDB
 from artiq.master.scheduler import Scheduler
 from artiq.master.worker_db import get_last_rid

@@ -27,6 +28,9 @@ def get_argparser():
     group.add_argument(
         "--port-control", default=3251, type=int,
         help="TCP port to listen to for control (default: %(default)d)")
+    group.add_argument(
+        "--port-logging", default=1066, type=int,
+        help="TCP port to listen to for remote logging (default: %(default)d)")
 
     group = parser.add_argument_group("databases")
     group.add_argument("--device-db", default="device_db.pyon",

@@ -49,7 +53,7 @@ def get_argparser():
 
 def main():
     args = get_argparser().parse_args()
-    log_buffer, log_forwarder = init_log(args)
+    log_buffer = init_log(args)
     if os.name == "nt":
         loop = asyncio.ProactorEventLoop()
         asyncio.set_event_loop(loop)

@@ -67,7 +71,7 @@ def main():
     else:
         repo_backend = FilesystemBackend(args.repository)
     repository = Repository(repo_backend, device_db.get_device_db,
-                            log_forwarder.log_worker)
+                            log_worker)
     atexit.register(repository.close)
     repository.scan_async()
 

@@ -76,14 +80,14 @@ def main():
         "get_device": device_db.get,
         "get_dataset": dataset_db.get,
         "update_dataset": dataset_db.update,
-        "log": log_forwarder.log_worker
+        "log": log_worker
     }
     scheduler = Scheduler(get_last_rid() + 1, worker_handlers, repo_backend)
     worker_handlers["scheduler_submit"] = scheduler.submit
     scheduler.start()
     atexit.register(lambda: loop.run_until_complete(scheduler.stop()))
 
-    server_control = Server({
+    server_control = RPCServer({
         "master_device_db": device_db,
         "master_dataset_db": dataset_db,
         "master_schedule": scheduler,

@@ -104,6 +108,11 @@ def main():
         args.bind, args.port_notify))
     atexit.register(lambda: loop.run_until_complete(server_notify.stop()))
 
+    server_logging = LoggingServer()
+    loop.run_until_complete(server_logging.start(
+        args.bind, args.port_logging))
+    atexit.register(lambda: loop.run_until_complete(server_logging.stop()))
+
     loop.run_forever()
 
 if __name__ == "__main__":

@@ -6,7 +6,7 @@ import sys
 import numpy as np  # Needed to use numpy in RPC call arguments on cmd line
 import pprint
 
-from artiq.protocols.pc_rpc import Client
+from artiq.protocols.pc_rpc import AutoTarget, Client
 
 
 def get_argparser():

@@ -85,19 +85,9 @@ def main():
     args = get_argparser().parse_args()
 
     remote = Client(args.server, args.port, None)
 
     targets, description = remote.get_rpc_id()
 
     if args.action != "list-targets":
-        # If no target specified and remote has only one, then use this one.
-        # Exit otherwise.
-        if len(targets) > 1 and args.target is None:
-            print("Remote server has several targets, please supply one with "
-                  "-t")
-            sys.exit(1)
-        elif args.target is None:
-            args.target = targets[0]
-        remote.select_rpc_target(args.target)
+        remote.select_rpc_target(AutoTarget)
 
     if args.action == "list-targets":
         list_targets(targets, description)
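AutoTarget replaces the hand-rolled selection logic: when the remote end exposes exactly one target, it is selected automatically, and the ambiguous-target case is presumably rejected by pc_rpc itself rather than by the frontend. A hedged sketch (server address and port are placeholders):

    from artiq.protocols.pc_rpc import AutoTarget, Client

    remote = Client("::1", 3251, None)    # connect without picking a target
    targets, description = remote.get_rpc_id()
    remote.select_rpc_target(AutoTarget)  # works when exactly one target exists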

@@ -55,7 +55,6 @@ def main():
     args = get_argparser().parse_args()
     init_logger(args)
     dev = Client(args.server, args.port, "pdq2")
-    dev.init()
 
     if args.reset:
         dev.write(b"\x00\x00")  # flush any escape

@@ -66,8 +65,6 @@ def main():
     dev.cmd("DCM", args.dcm)
     freq = 100e6 if args.dcm else 50e6
     dev.set_freq(freq)
-    num_channels = dev.get_num_channels()
-    num_frames = dev.get_num_frames()
     times = eval(args.times, globals(), {})
     voltages = eval(args.voltages, globals(), dict(t=times))

@@ -12,7 +12,7 @@ def get_argparser():
     parser = argparse.ArgumentParser()
     parser.add_argument("-P", "--product", required=True,
                         help="type of the Thorlabs T-Cube device to control",
-                        choices=["TDC001", "TPZ001"])
+                        choices=["tdc001", "tpz001"])
     parser.add_argument("-d", "--device", default=None,
                         help="serial device. See documentation for how to "
                              "specify a USB Serial Number.")

@@ -33,19 +33,20 @@ def main():
               "argument. Use --help for more information.")
         sys.exit(1)
 
+    product = args.product.lower()
     if args.simulation:
-        if args.product == "TDC001":
+        if product == "tdc001":
             dev = TdcSim()
-        elif args.product == "TPZ001":
+        elif product == "tpz001":
             dev = TpzSim()
     else:
-        if args.product == "TDC001":
+        if product == "tdc001":
             dev = Tdc(args.device)
-        elif args.product == "TPZ001":
+        elif product == "tpz001":
             dev = Tpz(args.device)
 
     try:
-        simple_server_loop({args.product.lower(): dev}, args.bind, args.port)
+        simple_server_loop({product: dev}, args.bind, args.port)
     finally:
         dev.close()
 

@@ -12,6 +12,11 @@ from artiq.tools import short_format
 from artiq.gui.tools import DictSyncModel
 from artiq.gui.displays import *
 
+try:
+    QSortFilterProxyModel = QtCore.QSortFilterProxyModel
+except AttributeError:
+    QSortFilterProxyModel = QtGui.QSortFilterProxyModel
+
 
 logger = logging.getLogger(__name__)
 

@@ -74,15 +79,7 @@ class DatasetsDock(dockarea.Dock):
         self.displays = dict()
 
     def _search_datasets(self):
-        model = self.table_model
-        search = self.search.displayText()
-        for row in range(model.rowCount(model.index(0, 0))):
-            index = model.index(row, 0)
-            dataset = model.data(index, QtCore.Qt.DisplayRole)
-            if search in dataset:
-                self.table.showRow(row)
-            else:
-                self.table.hideRow(row)
+        self.table_model_filter.setFilterFixedString(self.search.displayText())
 
     def get_dataset(self, key):
         return self.table_model.backing_store[key][1]

@@ -97,12 +94,16 @@ class DatasetsDock(dockarea.Dock):
 
     def init_datasets_model(self, init):
         self.table_model = DatasetsModel(self.table, init)
-        self.table.setModel(self.table_model)
+        self.table_model_filter = QSortFilterProxyModel()
+        self.table_model_filter.setSourceModel(self.table_model)
+        self.table.setModel(self.table_model_filter)
         return self.table_model
 
     def update_display_data(self, dsp):
-        dsp.update_data({k: self.table_model.backing_store[k][1]
-                         for k in dsp.data_sources()})
+        filtered_data = {k: self.table_model.backing_store[k][1]
+                         for k in dsp.data_sources()
+                         if k in self.table_model.backing_store}
+        dsp.update_data(filtered_data)
 
     def on_mod(self, mod):
         if mod["action"] == "init":

@@ -110,10 +111,10 @@ class DatasetsDock(dockarea.Dock):
             display.update_data(self.table_model.backing_store)
             return
 
-        if mod["action"] == "setitem":
-            source = mod["key"]
-        elif mod["path"]:
+        if mod["path"]:
             source = mod["path"][0]
+        elif mod["action"] == "setitem":
+            source = mod["key"]
         else:
             return
 
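Dataset filtering now delegates to Qt's QSortFilterProxyModel instead of hiding table rows by hand; the proxy sits between the source model and the view, so the model itself stays untouched. A minimal self-contained sketch, assuming PyQt5 (under PyQt4 the class lives in QtGui, which is what the try/except shim above accounts for); the item names are illustrative:

    from PyQt5 import QtCore, QtGui, QtWidgets

    app = QtWidgets.QApplication([])
    source = QtGui.QStandardItemModel()
    for name in ["photon_histogram", "flopping_f", "transport"]:
        source.appendRow(QtGui.QStandardItem(name))

    proxy = QtCore.QSortFilterProxyModel()
    proxy.setSourceModel(source)
    proxy.setFilterFixedString("photon")  # what _search_datasets now does
    print(proxy.rowCount())  # 1: only matching rows remain visible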

@@ -137,7 +137,7 @@ class XYDisplay(dockarea.Dock):
         error = data.get(result_error, None)
         fit = data.get(result_fit, None)
 
-        if not y or len(y) != len(x):
+        if not len(y) or len(y) != len(x):
             return
         if error is not None and hasattr(error, "__len__"):
             if not len(error):

@@ -201,7 +201,7 @@ class HistogramDisplay(dockarea.Dock):
         if x is None:
             x = list(range(len(y)+1))
 
-        if y and len(x) == len(y) + 1:
+        if len(y) and len(x) == len(y) + 1:
             self.plot.clear()
             self.plot.plot(x, y, stepMode=True, fillLevel=0,
                            brush=(0, 0, 255, 150))
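The truthiness changes matter because y may be a numpy array here: "if not y" raises "ValueError: The truth value of an array with more than one element is ambiguous" for arrays longer than one element, while len() behaves identically for lists and arrays. For example:

    import numpy as np

    y = np.array([1, 2, 3])
    len(y)     # 3, works for lists and arrays alike
    # bool(y)  # would raise ValueError: truth value is ambiguous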
@@ -1,4 +1,5 @@
 import asyncio
+import logging
 
 from quamash import QtGui, QtCore
 from pyqtgraph import dockarea

@@ -6,8 +7,9 @@ from pyqtgraph import LayoutWidget
 
 from artiq.protocols.sync_struct import Subscriber
 from artiq.protocols import pyon
-from artiq.gui.tools import si_prefix, DictSyncModel
+from artiq.gui.tools import DictSyncModel
 from artiq.gui.scan import ScanController
+from artiq.gui.shortcuts import ShortcutManager
 
 
 class _ExplistModel(DictSyncModel):

@@ -85,9 +87,8 @@ class _NumberEntry(QtGui.QDoubleSpinBox):
             self.setMaximum(procdesc["max"]/self.scale)
         else:
             self.setMaximum(float("inf"))
-        suffix = si_prefix(self.scale) + procdesc["unit"]
-        if suffix:
-            self.setSuffix(" " + suffix)
+        if procdesc["unit"]:
+            self.setSuffix(" " + procdesc["unit"])
         if "default" in procdesc:
             self.set_argument_value(procdesc["default"])
 

@@ -122,14 +123,14 @@ _procty_to_entry = {
 
 
 class _ArgumentEditor(QtGui.QTreeWidget):
-    def __init__(self, dialog_parent):
+    def __init__(self, main_window):
         QtGui.QTreeWidget.__init__(self)
         self.setColumnCount(2)
         self.header().setResizeMode(QtGui.QHeaderView.ResizeToContents)
         self.header().setVisible(False)
         self.setSelectionMode(QtGui.QAbstractItemView.NoSelection)
 
-        self.dialog_parent = dialog_parent
+        self.main_window = main_window
         self._groups = dict()
         self.set_arguments([])
 

@@ -176,7 +177,7 @@ class _ArgumentEditor(QtGui.QTreeWidget):
                 r[arg] = entry.get_argument_value()
             except Exception as e:
                 if show_error_message:
-                    msgbox = QtGui.QMessageBox(self.dialog_parent)
+                    msgbox = QtGui.QMessageBox(self.main_window)
                     msgbox.setWindowTitle("Error")
                     msgbox.setText("Failed to obtain value for argument '{}':\n{}"
                                    .format(arg, str(e)))

@@ -215,10 +216,10 @@ class _ArgumentEditor(QtGui.QTreeWidget):
 
 
 class ExplorerDock(dockarea.Dock):
-    def __init__(self, dialog_parent, status_bar, schedule_ctl):
+    def __init__(self, main_window, status_bar, schedule_ctl):
         dockarea.Dock.__init__(self, "Explorer", size=(1500, 500))
 
-        self.dialog_parent = dialog_parent
+        self.main_window = main_window
         self.status_bar = status_bar
         self.schedule_ctl = schedule_ctl
 

@@ -235,44 +236,59 @@ class ExplorerDock(dockarea.Dock):
 
         self.datetime = QtGui.QDateTimeEdit()
         self.datetime.setDisplayFormat("MMM d yyyy hh:mm:ss")
-        self.datetime.setCalendarPopup(True)
         self.datetime.setDate(QtCore.QDate.currentDate())
         self.datetime.dateTimeChanged.connect(self.enable_duedate)
         self.datetime_en = QtGui.QCheckBox("Due date:")
-        grid.addWidget(self.datetime_en, 1, 0)
-        grid.addWidget(self.datetime, 1, 1)
+        grid.addWidget(self.datetime_en, 1, 0, colspan=2)
+        grid.addWidget(self.datetime, 1, 2, colspan=2)
 
-        self.priority = QtGui.QSpinBox()
-        self.priority.setRange(-99, 99)
-        grid.addWidget(QtGui.QLabel("Priority:"), 1, 2)
-        grid.addWidget(self.priority, 1, 3)
-
         self.pipeline = QtGui.QLineEdit()
         self.pipeline.setText("main")
-        grid.addWidget(QtGui.QLabel("Pipeline:"), 2, 0)
-        grid.addWidget(self.pipeline, 2, 1)
+        grid.addWidget(QtGui.QLabel("Pipeline:"), 2, 0, colspan=2)
+        grid.addWidget(self.pipeline, 2, 2, colspan=2)
 
+        self.priority = QtGui.QSpinBox()
+        self.priority.setRange(-99, 99)
+        grid.addWidget(QtGui.QLabel("Priority:"), 3, 0)
+        grid.addWidget(self.priority, 3, 1)
+
         self.flush = QtGui.QCheckBox("Flush")
-        grid.addWidget(self.flush, 2, 2, colspan=2)
+        self.flush.setToolTip("Flush the pipeline before starting the experiment")
+        grid.addWidget(self.flush, 3, 2)
+
+        self.log_level = QtGui.QComboBox()
+        self.log_level.addItems(["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"])
+        self.log_level.setCurrentIndex(1)
+        self.log_level.setToolTip("Minimum level for log entry production")
+        grid.addWidget(self.log_level, 3, 3)
+
         submit = QtGui.QPushButton("Submit")
-        grid.addWidget(submit, 3, 0, colspan=4)
+        submit.setShortcut("CTRL+RETURN")
+        submit.setToolTip("Schedule the selected experiment (CTRL+ENTER)")
+        grid.addWidget(submit, 4, 0, colspan=4)
         submit.clicked.connect(self.submit_clicked)
 
-        self.argeditor = _ArgumentEditor(self.dialog_parent)
+        self.argeditor = _ArgumentEditor(self.main_window)
         self.splitter.addWidget(self.argeditor)
         self.splitter.setSizes([grid.minimumSizeHint().width(), 1000])
-        self.state = dict()
+        self.argeditor_states = dict()
+
+        self.shortcuts = ShortcutManager(self.main_window, self)
+
+        self.el.setContextMenuPolicy(QtCore.Qt.ActionsContextMenu)
+        edit_shortcuts_action = QtGui.QAction("Edit shortcuts", self.el)
+        edit_shortcuts_action.triggered.connect(self.edit_shortcuts)
+        self.el.addAction(edit_shortcuts_action)
 
     def update_selection(self, selected, deselected):
         if deselected:
-            self.state[deselected] = self.argeditor.save_state()
+            self.argeditor_states[deselected] = self.argeditor.save_state()
 
         if selected:
             expinfo = self.explist_model.backing_store[selected]
             self.argeditor.set_arguments(expinfo["arguments"])
-            if selected in self.state:
-                self.argeditor.restore_state(self.state[selected])
+            if selected in self.argeditor_states:
+                self.argeditor.restore_state(self.argeditor_states[selected])
             self.splitter.insertWidget(1, self.argeditor)
         self.selected_key = selected
 

@@ -293,11 +309,20 @@ class ExplorerDock(dockarea.Dock):
         if idx:
             row = idx[0].row()
             key = self.explist_model.row_to_key[row]
-            self.state[key] = self.argeditor.save_state()
-        return self.state
+            self.argeditor_states[key] = self.argeditor.save_state()
+        return {
+            "argeditor": self.argeditor_states,
+            "shortcuts": self.shortcuts.save_state()
+        }
 
     def restore_state(self, state):
-        self.state = state
+        try:
+            argeditor_states = state["argeditor"]
+            shortcuts_state = state["shortcuts"]
+        except KeyError:
+            return
+        self.argeditor_states = argeditor_states
+        self.shortcuts.restore_state(shortcuts_state)
 
     def enable_duedate(self):
         self.datetime_en.setChecked(True)

@@ -315,9 +340,10 @@ class ExplorerDock(dockarea.Dock):
         self.el.setModel(self.explist_model)
         return self.explist_model
 
-    async def submit(self, pipeline_name, file, class_name, arguments,
-                     priority, due_date, flush):
+    async def submit_task(self, pipeline_name, file, class_name, arguments,
+                          priority, due_date, flush):
         expid = {
+            "log_level": getattr(logging, self.log_level.currentText()),
             "repo_rev": None,
             "file": file,
             "class_name": class_name,

@@ -327,20 +353,41 @@ class ExplorerDock(dockarea.Dock):
                                                   priority, due_date, flush)
         self.status_bar.showMessage("Submitted RID {}".format(rid))
 
+    def submit(self, pipeline, key, priority, due_date, flush):
+        # TODO: refactor explorer and cleanup.
+        # Argument editors should immediately modify the global state.
+        expinfo = self.explist_model.backing_store[key]
+        if key == self.selected_key:
+            arguments = self.argeditor.get_argument_values(True)
+            if arguments is None:
+                # There has been an error. Displaying the error message box
+                # was done by argeditor.
+                return
+        else:
+            try:
+                arguments = self.argeditor_states[key]["argument_values"]
+            except KeyError:
+                arguments = dict()
+        asyncio.ensure_future(self.submit_task(self.pipeline.text(),
+                                               expinfo["file"],
+                                               expinfo["class_name"],
+                                               arguments,
+                                               priority,
+                                               due_date,
+                                               flush))
+
     def submit_clicked(self):
         if self.selected_key is not None:
-            expinfo = self.explist_model.backing_store[self.selected_key]
             if self.datetime_en.isChecked():
                 due_date = self.datetime.dateTime().toMSecsSinceEpoch()/1000
             else:
                 due_date = None
-            arguments = self.argeditor.get_argument_values(True)
-            if arguments is None:
-                return
-            asyncio.ensure_future(self.submit(self.pipeline.text(),
-                                              expinfo["file"],
-                                              expinfo["class_name"],
-                                              arguments,
-                                              self.priority.value(),
-                                              due_date,
-                                              self.flush.isChecked()))
+            self.submit(self.pipeline.text(),
+                        self.selected_key,
+                        self.priority.value(),
+                        due_date,
+                        self.flush.isChecked())
+
+    def edit_shortcuts(self):
+        experiments = sorted(self.explist_model.backing_store.keys())
+        self.shortcuts.edit(experiments)

@@ -3,11 +3,16 @@ import logging
 import time
 
 from quamash import QtGui, QtCore
-from pyqtgraph import dockarea
+from pyqtgraph import dockarea, LayoutWidget
 
 from artiq.protocols.sync_struct import Subscriber
 from artiq.gui.tools import ListSyncModel
 
+try:
+    QSortFilterProxyModel = QtCore.QSortFilterProxyModel
+except AttributeError:
+    QSortFilterProxyModel = QtGui.QSortFilterProxyModel
+
 
 def _level_to_name(level):
     if level >= logging.CRITICAL:

@@ -20,6 +25,7 @@ def _level_to_name(level):
         return "INFO"
     return "DEBUG"
 
+
 class _LogModel(ListSyncModel):
     def __init__(self, parent, init):
         ListSyncModel.__init__(self,

@@ -66,10 +72,39 @@ class _LogModel(ListSyncModel):
         return v[3]
 
 
+class _LevelFilterProxyModel(QSortFilterProxyModel):
+    def __init__(self, min_level):
+        QSortFilterProxyModel.__init__(self)
+        self.min_level = min_level
+
+    def filterAcceptsRow(self, sourceRow, sourceParent):
+        model = self.sourceModel()
+        index = model.index(sourceRow, 0, sourceParent)
+        data = model.data(index, QtCore.Qt.DisplayRole)
+        return getattr(logging, data) >= self.min_level
+
+    def set_min_level(self, min_level):
+        self.min_level = min_level
+        self.invalidateFilter()
+
+
 class LogDock(dockarea.Dock):
     def __init__(self):
         dockarea.Dock.__init__(self, "Log", size=(1000, 300))
 
+        grid = LayoutWidget()
+        self.addWidget(grid)
+
+        grid.addWidget(QtGui.QLabel("Minimum level: "), 0, 0)
+        grid.layout.setColumnStretch(0, 0)
+        grid.layout.setColumnStretch(1, 0)
+        grid.layout.setColumnStretch(2, 1)
+        self.filterbox = QtGui.QComboBox()
+        self.filterbox.addItems(["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"])
+        self.filterbox.setToolTip("Display entries at or above this level")
+        grid.addWidget(self.filterbox, 0, 1)
+        self.filterbox.currentIndexChanged.connect(self.filter_changed)
+
         self.log = QtGui.QTableView()
         self.log.setSelectionMode(QtGui.QAbstractItemView.NoSelection)
         self.log.horizontalHeader().setResizeMode(

@@ -78,7 +113,7 @@ class LogDock(dockarea.Dock):
             QtGui.QAbstractItemView.ScrollPerPixel)
         self.log.setShowGrid(False)
         self.log.setTextElideMode(QtCore.Qt.ElideNone)
-        self.addWidget(self.log)
+        grid.addWidget(self.log, 1, 0, colspan=3)
         self.scroll_at_bottom = False
 
     async def sub_connect(self, host, port):

@@ -88,6 +123,10 @@ class LogDock(dockarea.Dock):
     async def sub_close(self):
         await self.subscriber.close()
 
+    def filter_changed(self):
+        self.table_model_filter.set_min_level(
+            getattr(logging, self.filterbox.currentText()))
+
     def rows_inserted_before(self):
         scrollbar = self.log.verticalScrollBar()
         self.scroll_at_bottom = scrollbar.value() == scrollbar.maximum()

@@ -98,7 +137,21 @@ class LogDock(dockarea.Dock):
 
     def init_log_model(self, init):
         table_model = _LogModel(self.log, init)
-        self.log.setModel(table_model)
-        table_model.rowsAboutToBeInserted.connect(self.rows_inserted_before)
-        table_model.rowsInserted.connect(self.rows_inserted_after)
+        self.table_model_filter = _LevelFilterProxyModel(
+            getattr(logging, self.filterbox.currentText()))
+        self.table_model_filter.setSourceModel(table_model)
+        self.log.setModel(self.table_model_filter)
+        self.table_model_filter.rowsAboutToBeInserted.connect(self.rows_inserted_before)
+        self.table_model_filter.rowsInserted.connect(self.rows_inserted_after)
         return table_model
 
+    def save_state(self):
+        return {"min_level_idx": self.filterbox.currentIndex()}
+
+    def restore_state(self, state):
+        try:
+            idx = state["min_level_idx"]
+        except KeyError:
+            pass
+        else:
+            self.filterbox.setCurrentIndex(idx)

@@ -1,11 +1,9 @@
 from quamash import QtGui
 from pyqtgraph import LayoutWidget
 
-from artiq.gui.tools import si_prefix
-
 
 class _Range(LayoutWidget):
-    def __init__(self, global_min, global_max, global_step, suffix, scale, ndecimals):
+    def __init__(self, global_min, global_max, global_step, unit, scale, ndecimals):
         LayoutWidget.__init__(self)
 
         self.scale = scale

@@ -21,8 +19,8 @@ class _Range(LayoutWidget):
             spinbox.setMaximum(float("inf"))
         if global_step is not None:
             spinbox.setSingleStep(global_step/self.scale)
-        if suffix:
-            spinbox.setSuffix(" " + suffix)
+        if unit:
+            spinbox.setSuffix(" " + unit)
 
         self.addWidget(QtGui.QLabel("Min:"), 0, 0)
         self.min = QtGui.QDoubleSpinBox()

@@ -68,7 +66,7 @@ class ScanController(LayoutWidget):
 
         gmin, gmax = procdesc["global_min"], procdesc["global_max"]
         gstep = procdesc["global_step"]
-        suffix = si_prefix(self.scale) + procdesc["unit"]
+        unit = procdesc["unit"]
         ndecimals = procdesc["ndecimals"]
 
         self.v_noscan = QtGui.QDoubleSpinBox()

@@ -82,17 +80,17 @@ class ScanController(LayoutWidget):
         else:
             self.v_noscan.setMaximum(float("inf"))
         self.v_noscan.setSingleStep(gstep/self.scale)
-        if suffix:
-            self.v_noscan.setSuffix(" " + suffix)
+        if unit:
+            self.v_noscan.setSuffix(" " + unit)
         self.v_noscan_gr = LayoutWidget()
         self.v_noscan_gr.addWidget(QtGui.QLabel("Value:"), 0, 0)
         self.v_noscan_gr.addWidget(self.v_noscan, 0, 1)
         self.stack.addWidget(self.v_noscan_gr)
 
-        self.v_linear = _Range(gmin, gmax, gstep, suffix, self.scale, ndecimals)
+        self.v_linear = _Range(gmin, gmax, gstep, unit, self.scale, ndecimals)
         self.stack.addWidget(self.v_linear)
 
-        self.v_random = _Range(gmin, gmax, gstep, suffix, self.scale, ndecimals)
+        self.v_random = _Range(gmin, gmax, gstep, unit, self.scale, ndecimals)
         self.stack.addWidget(self.v_random)
 
         self.v_explicit = QtGui.QLineEdit()

@@ -75,9 +75,11 @@ class ScheduleDock(dockarea.Dock):
         self.table.setContextMenuPolicy(QtCore.Qt.ActionsContextMenu)
         request_termination_action = QtGui.QAction("Request termination", self.table)
         request_termination_action.triggered.connect(partial(self.delete_clicked, True))
+        request_termination_action.setShortcut("DELETE")
         self.table.addAction(request_termination_action)
         delete_action = QtGui.QAction("Delete", self.table)
         delete_action.triggered.connect(partial(self.delete_clicked, False))
+        delete_action.setShortcut("SHIFT+DELETE")
         self.table.addAction(delete_action)
 

@@ -104,5 +106,9 @@ class ScheduleDock(dockarea.Dock):
         if idx:
             row = idx[0].row()
             rid = self.table_model.row_to_key[row]
-            self.status_bar.showMessage("Deleted RID {}".format(rid))
+            if graceful:
+                msg = "Requested termination of RID {}".format(rid)
+            else:
+                msg = "Deleted RID {}".format(rid)
+            self.status_bar.showMessage(msg)
             asyncio.ensure_future(self.delete(rid, graceful))
@@ -0,0 +1,98 @@
+from functools import partial
+
+from quamash import QtGui
+try:
+    from quamash import QtWidgets
+    QShortcut = QtWidgets.QShortcut
+except:
+    QShortcut = QtGui.QShortcut
+
+
+class _ShortcutEditor(QtGui.QDialog):
+    def __init__(self, parent, experiments, shortcuts):
+        QtGui.QDialog.__init__(self, parent=parent)
+        self.setWindowTitle("Shortcuts")
+
+        self.shortcuts = shortcuts
+        self.edit_widgets = dict()
+
+        grid = QtGui.QGridLayout()
+        self.setLayout(grid)
+
+        for n, title in enumerate(["Key", "Experiment", "Priority", "Pipeline"]):
+            label = QtGui.QLabel("<b>" + title + "</b>")
+            grid.addWidget(label, 0, n)
+            label.setMaximumHeight(label.sizeHint().height())
+        grid.setColumnStretch(1, 1)
+        grid.setColumnStretch(3, 1)
+
+        for i in range(12):
+            row = i + 1
+            existing_shortcut = self.shortcuts.get(i, dict())
+
+            grid.addWidget(QtGui.QLabel("F" + str(i+1)), row, 0)
+
+            experiment = QtGui.QComboBox()
+            grid.addWidget(experiment, row, 1)
+            experiment.addItem("<None>")
+            experiment.addItems(experiments)
+            experiment.setEditable(True)
+            experiment.setEditText(
+                existing_shortcut.get("experiment", "<None>"))
+
+            priority = QtGui.QSpinBox()
+            grid.addWidget(priority, row, 2)
+            priority.setRange(-99, 99)
+            priority.setValue(existing_shortcut.get("priority", 0))
+
+            pipeline = QtGui.QLineEdit()
+            grid.addWidget(pipeline, row, 3)
+            pipeline.setText(existing_shortcut.get("pipeline", "main"))
+
+            self.edit_widgets[i] = {
+                "experiment": experiment,
+                "priority": priority,
+                "pipeline": pipeline
+            }
+
+        buttons = QtGui.QDialogButtonBox(
+            QtGui.QDialogButtonBox.Ok | QtGui.QDialogButtonBox.Cancel)
+        grid.addWidget(buttons, 14, 0, 1, 4)
+        buttons.accepted.connect(self.accept)
+        buttons.rejected.connect(self.reject)
+        self.accepted.connect(self.on_accept)
+
+    def on_accept(self):
+        for n, widgets in self.edit_widgets.items():
+            self.shortcuts[n] = {
+                "experiment": widgets["experiment"].currentText(),
+                "priority": widgets["priority"].value(),
+                "pipeline": widgets["pipeline"].text()
+            }
+
+
+class ShortcutManager:
+    def __init__(self, main_window, explorer):
+        for i in range(12):
+            shortcut = QShortcut("F" + str(i+1), main_window)
+            shortcut.activated.connect(partial(self._activated, i))
+        self.main_window = main_window
+        self.explorer = explorer
+        self.shortcuts = dict()
+
+    def edit(self, experiments):
+        dlg = _ShortcutEditor(self.main_window, experiments, self.shortcuts)
+        dlg.open()
+
+    def _activated(self, nr):
+        info = self.shortcuts.get(nr, dict())
+        experiment = info.get("experiment", "")
+        if experiment and experiment != "<None>":
+            self.explorer.submit(info["pipeline"], experiment,
+                                 info["priority"], None, False)
+
+    def save_state(self):
+        return self.shortcuts
+
+    def restore_state(self, state):
+        self.shortcuts = state
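For context, a sketch of how the new ShortcutManager is meant to be wired into the GUI (the `main_window` and `explorer` objects and the experiment name are illustrative, not part of this diff):

    shortcuts = ShortcutManager(main_window, explorer)  # binds F1..F12
    shortcuts.restore_state(saved)              # e.g. from the GUI state file
    shortcuts.edit(["flopping_f_simulation"])   # opens the editor dialog
    saved = shortcuts.save_state()              # plain dict, PYON-serializable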
@@ -1,22 +1,4 @@
 from quamash import QtCore
-import numpy as np
-
-
-def si_prefix(scale):
-    try:
-        return {
-            1e-12: "p",
-            1e-9: "n",
-            1e-6: "u",
-            1e-3: "m",
-            1.0: "",
-            1e3: "k",
-            1e6: "M",
-            1e9: "G",
-            1e12: "T"
-        }[scale]
-    except KeyError:
-        return "[x{}]".format(scale)


 class _SyncSubstruct:
@@ -95,7 +77,7 @@ class DictSyncModel(QtCore.QAbstractTableModel):
         new_row = self._find_row(k, v)
         if old_row == new_row:
             self.dataChanged.emit(self.index(old_row, 0),
-                                  self.index(old_row, len(self.headers)))
+                                  self.index(old_row, len(self.headers)-1))
         else:
             self.beginMoveRows(QtCore.QModelIndex(), old_row, old_row,
                                QtCore.QModelIndex(), new_row)
@@ -157,7 +139,7 @@ class ListSyncModel(QtCore.QAbstractTableModel):

     def __setitem__(self, k, v):
         self.dataChanged.emit(self.index(k, 0),
-                              self.index(k, len(self.headers)))
+                              self.index(k, len(self.headers)-1))
         self.backing_store[k] = v

     def __delitem__(self, k):
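The `-1` added in both models fixes an off-by-one: `QAbstractItemModel.dataChanged` takes inclusive top-left and bottom-right indexes, so a hypothetical three-column model must report column 2, not 3, as its last column:

    # headers == ["RID", "pipeline", "status"] -> valid columns are 0, 1, 2,
    # so the bottom-right corner of a full-row update is len(headers)-1 == 2
    self.dataChanged.emit(self.index(row, 0),
                          self.index(row, len(self.headers)-1))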
@@ -73,8 +73,7 @@ class NumberValue(_SimpleArgProcessor):

     :param unit: A string representing the unit of the value, for user
         interface (UI) purposes.
-    :param scale: The scale of value for UI purposes. The corresponding SI
-        prefix is shown in front of the unit, and the displayed value is
+    :param scale: The scale of value for UI purposes. The displayed value is
         divided by the scale.
     :param step: The step with which the value should be modified by up/down
         buttons in a UI. The default is the scale divided by 10.
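An illustration of the rewritten `scale` semantics (argument values are invented): with `scale=1e6` and `unit="MHz"`, a stored value of `1.5e6` is shown as `1.5` next to the unit label, and no SI prefix is derived automatically anymore:

    # GUI shows 1.5 (== 1.5e6 / scale) alongside "MHz"; the experiment
    # still receives the unscaled 1.5e6
    freq = NumberValue(1.5e6, unit="MHz", scale=1e6)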
@@ -209,9 +208,15 @@ class HasEnvironment:
                     broadcast=False, persist=False, save=True):
         """Sets the contents and handling modes of a dataset.

+        If the dataset is broadcasted, it must be PYON-serializable.
+        If the dataset is saved, it must be a scalar (``bool``, ``int``,
+        ``float`` or NumPy scalar) or a NumPy array.
+
         :param broadcast: the data is sent in real-time to the master, which
-            dispatches it. Returns a Notifier that can be used to mutate the dataset.
-        :param persist: the master should store the data on-disk. Implies broadcast.
+            dispatches it. Returns a Notifier that can be used to mutate the
+            dataset.
+        :param persist: the master should store the data on-disk. Implies
+            broadcast.
         :param save: the data is saved into the local storage of the current
             run (archived as a HDF5 file).
         """
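A usage sketch consistent with the new constraints (dataset names invented; `np` is `numpy`):

    # broadcast -> must be PYON-serializable; returns a Notifier
    parabola = self.set_dataset("parabola", np.zeros(10), broadcast=True)
    parabola[0] = 42.0              # mutates and re-broadcasts the element
    # save (the default) -> scalar or NumPy array, archived in the run's HDF5
    self.set_dataset("n_ions", 7)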
@@ -140,8 +140,7 @@ class Scannable:
         by 10.
     :param unit: A string representing the unit of the scanned variable, for user
         interface (UI) purposes.
-    :param scale: The scale of value for UI purposes. The corresponding SI
-        prefix is shown in front of the unit, and the displayed value is
+    :param scale: The scale of value for UI purposes. The displayed value is
         divided by the scale.
     :param ndecimals: The number of decimals a UI should use.
     """
@@ -32,7 +32,10 @@ class DatasetDB(TaskObject):
         self.persist_file = persist_file
         self.autosave_period = autosave_period

-        file_data = pyon.load_file(self.persist_file)
+        try:
+            file_data = pyon.load_file(self.persist_file)
+        except FileNotFoundError:
+            file_data = dict()
         self.data = Notifier({k: (True, v) for k, v in file_data.items()})

     def save(self):
@@ -1,6 +1,8 @@
 import logging
+import logging.handlers

 from artiq.protocols.sync_struct import Notifier
+from artiq.protocols.logging import parse_log_message, log_with_name, SourceFilter


 class LogBuffer:
@@ -18,88 +20,65 @@ class LogBufferHandler(logging.Handler):
     def __init__(self, log_buffer, *args, **kwargs):
         logging.Handler.__init__(self, *args, **kwargs)
         self.log_buffer = log_buffer
+        self.setFormatter(logging.Formatter("%(name)s:%(message)s"))

     def emit(self, record):
         message = self.format(record)
-        self.log_buffer.log(record.levelno, record.source, record.created, message)
+        for part in message.split("\n"):
+            self.log_buffer.log(record.levelno, record.source, record.created,
+                                part)


-name_to_level = {
-    "CRITICAL": logging.CRITICAL,
-    "ERROR": logging.ERROR,
-    "WARN": logging.WARNING,
-    "WARNING": logging.WARNING,
-    "INFO": logging.INFO,
-    "DEBUG": logging.DEBUG,
-}
-
-
-def parse_log_message(msg):
-    for name, level in name_to_level.items():
-        if msg.startswith(name + ":"):
-            remainder = msg[len(name) + 1:]
-            try:
-                idx = remainder.index(":")
-            except:
-                continue
-            return level, remainder[:idx], remainder[idx+1:]
-    return logging.INFO, "print", msg
-
-
-fwd_logger = logging.getLogger("fwd")
-
-
-class LogForwarder:
-    def log_worker(self, rid, message):
-        level, name, message = parse_log_message(message)
-        fwd_logger.name = name
-        fwd_logger.log(level, message,
-                       extra={"source": "worker({})".format(rid)})
-    log_worker.worker_pass_rid = True
-
-
-class SourceFilter:
-    def __init__(self, master_level):
-        self.master_level = master_level
-
-    def filter(self, record):
-        if not hasattr(record, "source"):
-            record.source = "master"
-        if record.source == "master":
-            return record.levelno >= self.master_level
-        else:
-            # log messages that are forwarded from a source have already
-            # been filtered, and may have a level below the master level.
-            return True
+def log_worker(rid, message):
+    level, name, message = parse_log_message(message)
+    log_with_name(name, level, message,
+                  extra={"source": "worker({})".format(rid)})
+log_worker.worker_pass_rid = True


 def log_args(parser):
-    group = parser.add_argument_group("verbosity")
+    group = parser.add_argument_group("logging")
     group.add_argument("-v", "--verbose", default=0, action="count",
-                       help="increase logging level for the master process")
+                       help="increase logging level of the master process")
     group.add_argument("-q", "--quiet", default=0, action="count",
-                       help="decrease logging level for the master process")
+                       help="decrease logging level of the master process")
+    group.add_argument("--log-file", default="",
+                       help="store logs in rotated files; set the "
+                            "base filename")
+    group.add_argument("--log-max-size", type=int, default=1024,
+                       help="maximum size of each log file in KiB "
+                            "(default: %(default)d)")
+    group.add_argument("--log-backup-count", type=int, default=6,
+                       help="number of old log files to keep (.<n> is added "
+                            "to the base filename) (default: %(default)d)")


 def init_log(args):
     root_logger = logging.getLogger()
     root_logger.setLevel(logging.NOTSET)  # we use our custom filter only
-    flt = SourceFilter(logging.WARNING + args.quiet*10 - args.verbose*10)
+    flt = SourceFilter(logging.WARNING + args.quiet*10 - args.verbose*10,
+                       "master")
     handlers = []
     console_handler = logging.StreamHandler()
-    console_handler.setFormatter(logging.Formatter("%(levelname)s:%(source)s:%(name)s:%(message)s"))
+    console_handler.setFormatter(logging.Formatter(
+        "%(levelname)s:%(source)s:%(name)s:%(message)s"))
     handlers.append(console_handler)
+
+    if args.log_file:
+        file_handler = logging.handlers.RotatingFileHandler(
+            args.log_file,
+            maxBytes=args.log_max_size*1024,
+            backupCount=args.log_backup_count)
+        file_handler.setFormatter(logging.Formatter(
+            "%(asctime)s %(levelname)s:%(source)s:%(name)s:%(message)s"))
+        handlers.append(file_handler)

     log_buffer = LogBuffer(1000)
     buffer_handler = LogBufferHandler(log_buffer)
-    buffer_handler.setFormatter(logging.Formatter("%(name)s:%(message)s"))
     handlers.append(buffer_handler)

     for handler in handlers:
         handler.addFilter(flt)
         root_logger.addHandler(handler)

-    log_forwarder = LogForwarder()
-
-    return log_buffer, log_forwarder
+    return log_buffer
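For reference, how the new options combine (file name invented): each `-v` subtracts 10 from the local threshold and each `-q` adds 10, while `--log-file` enables standard-library size-based rotation:

    # WARNING(30) by default; -v -> INFO(20), -vv -> DEBUG(10), -q -> ERROR(40)
    level = logging.WARNING + args.quiet*10 - args.verbose*10
    # artiq_master --log-file master.log --log-max-size 1024 --log-backup-count 6
    # keeps master.log plus master.log.1 .. master.log.6, each up to 1 MiB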
@@ -229,8 +229,8 @@ class PrepareStage(TaskObject):
             await run.prepare()
         except:
             logger.error("got worker exception in prepare stage, "
-                         "deleting RID %d",
-                         run.rid, exc_info=True)
+                         "deleting RID %d", run.rid)
+            logger.debug("worker exception details", exc_info=True)
             self.delete_cb(run.rid)
         else:
             run.status = RunStatus.prepare_done
@@ -279,8 +279,8 @@ class RunStage(TaskObject):
             completed = await run.run()
         except:
             logger.error("got worker exception in run stage, "
-                         "deleting RID %d",
-                         run.rid, exc_info=True)
+                         "deleting RID %d", run.rid)
+            logger.debug("worker exception details", exc_info=True)
             self.delete_cb(run.rid)
         else:
             if completed:
@@ -317,8 +317,8 @@ class AnalyzeStage(TaskObject):
             await run.write_results()
         except:
             logger.error("got worker exception in analyze stage, "
-                         "deleting RID %d",
-                         run.rid, exc_info=True)
+                         "deleting RID %d", run.rid)
+            logger.debug("worker exception details", exc_info=True)
             self.delete_cb(run.rid)
         else:
             self.delete_cb(run.rid)
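All three stages now report on two levels: a one-line summary at ERROR, with the traceback demoted to DEBUG. A self-contained illustration of that standard-library behavior:

    import logging
    logging.basicConfig(level=logging.DEBUG)
    log = logging.getLogger("demo")
    try:
        1/0
    except:
        log.error("got worker exception, deleting RID 5")     # always terse
        log.debug("worker exception details", exc_info=True)  # full traceback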
@@ -21,10 +21,6 @@ class WorkerWatchdogTimeout(Exception):
     pass


-class WorkerException(Exception):
-    pass
-
-
 class WorkerError(Exception):
     pass
@@ -60,13 +56,14 @@ class Worker:
         else:
             return None

-    async def _create_process(self):
+    async def _create_process(self, log_level):
         await self.io_lock.acquire()
         try:
             if self.closed.is_set():
                 raise WorkerError("Attempting to create process after close")
             self.process = await asyncio.create_subprocess_exec(
                 sys.executable, "-m", "artiq.master.worker_impl",
+                str(log_level),
                 stdout=subprocess.PIPE, stdin=subprocess.PIPE)
         finally:
             self.io_lock.release()
@@ -95,19 +92,26 @@ class Worker:
         try:
             await self._send(obj, cancellable=False)
         except:
-            logger.warning("failed to send terminate command to worker"
-                           " (RID %s), killing", self.rid, exc_info=True)
-            self.process.kill()
+            logger.debug("failed to send terminate command to worker"
+                         " (RID %s), killing", self.rid, exc_info=True)
+            try:
+                self.process.kill()
+            except ProcessLookupError:
+                pass
             await self.process.wait()
             return
         try:
             await asyncio.wait_for(self.process.wait(), term_timeout)
         except asyncio.TimeoutError:
-            logger.warning("worker did not exit (RID %s), killing", self.rid)
-            self.process.kill()
+            logger.debug("worker did not exit by itself (RID %s), killing",
+                         self.rid)
+            try:
+                self.process.kill()
+            except ProcessLookupError:
+                pass
             await self.process.wait()
         else:
-            logger.debug("worker exited gracefully (RID %s)", self.rid)
+            logger.debug("worker exited by itself (RID %s)", self.rid)
         finally:
             self.io_lock.release()
@@ -163,10 +167,7 @@ class Worker:
             return True
         elif action == "pause":
             return False
-        elif action == "exception":
-            raise WorkerException
-        del obj["action"]
-        if action == "create_watchdog":
+        elif action == "create_watchdog":
             func = self.create_watchdog
         elif action == "delete_watchdog":
             func = self.delete_watchdog
@@ -177,7 +178,7 @@ class Worker:
         if getattr(func, "worker_pass_rid", False):
             func = partial(func, self.rid)
         try:
-            data = func(**obj)
+            data = func(*obj["args"], **obj["kwargs"])
             reply = {"status": "ok", "data": data}
         except:
             reply = {"status": "failed",
@@ -208,7 +209,7 @@ class Worker:

     async def build(self, rid, pipeline_name, wd, expid, priority, timeout=15.0):
         self.rid = rid
-        await self._create_process()
+        await self._create_process(expid["log_level"])
         await self._worker_action(
             {"action": "build",
              "rid": rid,
@@ -245,7 +246,7 @@ class Worker:
             timeout)

     async def examine(self, file, timeout=20.0):
-        await self._create_process()
+        await self._create_process(logging.WARNING)
         r = dict()
         def register(class_name, name, arguments):
             r[class_name] = {"name": name, "arguments": arguments}
@@ -5,11 +5,11 @@ import os
 import time
 import re

-import numpy
+import numpy as np
 import h5py

 from artiq.protocols.sync_struct import Notifier
-from artiq.protocols.pc_rpc import Client, BestEffortClient
+from artiq.protocols.pc_rpc import AutoTarget, Client, BestEffortClient


 logger = logging.getLogger(__name__)
@@ -22,11 +22,16 @@ def _create_device(desc, device_mgr):
         device_class = getattr(module, desc["class"])
         return device_class(device_mgr, **desc["arguments"])
     elif ty == "controller":
-        if desc["best_effort"]:
-            cl = BestEffortClient
+        if desc.get("best_effort", False):
+            cls = BestEffortClient
         else:
-            cl = Client
-        return cl(desc["host"], desc["port"], desc["target_name"])
+            cls = Client
+        # Automatic target can be specified either by the absence of
+        # the target_name parameter, or a None value.
+        target_name = desc.get("target_name", None)
+        if target_name is None:
+            target_name = AutoTarget
+        return cls(desc["host"], desc["port"], target_name)
     else:
         raise ValueError("Unsupported type in device DB: " + ty)
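With this change a controller entry in the device database may omit `target_name` (and `best_effort`); a hypothetical entry:

    "lda": {
        "type": "controller",
        "host": "::1",
        "port": 3253,
        # no "target_name": the client connects with AutoTarget, which is
        # valid as long as the controller exposes exactly one target
    }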
@@ -114,36 +119,53 @@ def get_last_rid():

 _type_to_hdf5 = {
     int: h5py.h5t.STD_I64BE,
-    float: h5py.h5t.IEEE_F64BE
+    float: h5py.h5t.IEEE_F64BE,
+
+    np.int8: h5py.h5t.STD_I8BE,
+    np.int16: h5py.h5t.STD_I16BE,
+    np.int32: h5py.h5t.STD_I32BE,
+    np.int64: h5py.h5t.STD_I64BE,
+
+    np.uint8: h5py.h5t.STD_U8BE,
+    np.uint16: h5py.h5t.STD_U16BE,
+    np.uint32: h5py.h5t.STD_U32BE,
+    np.uint64: h5py.h5t.STD_U64BE,
+
+    np.float16: h5py.h5t.IEEE_F16BE,
+    np.float32: h5py.h5t.IEEE_F32BE,
+    np.float64: h5py.h5t.IEEE_F64BE
 }

 def result_dict_to_hdf5(f, rd):
     for name, data in rd.items():
-        if isinstance(data, list):
-            el_ty = type(data[0])
-            for d in data:
-                if type(d) != el_ty:
-                    raise TypeError("All list elements must have the same"
-                                    " type for HDF5 output")
-            try:
-                el_ty_h5 = _type_to_hdf5[el_ty]
-            except KeyError:
-                raise TypeError("List element type {} is not supported for"
-                                " HDF5 output".format(el_ty))
-            dataset = f.create_dataset(name, (len(data), ), el_ty_h5)
-            dataset[:] = data
-        elif isinstance(data, numpy.ndarray):
-            f.create_dataset(name, data=data)
+        flag = None
+        # beware: isinstance(True/False, int) == True
+        if isinstance(data, bool):
+            data = np.int8(data)
+            flag = "py_bool"
+        elif isinstance(data, int):
+            data = np.int64(data)
+            flag = "py_int"
+
+        if isinstance(data, np.ndarray):
+            dataset = f.create_dataset(name, data=data)
         else:
             ty = type(data)
-            try:
-                ty_h5 = _type_to_hdf5[ty]
-            except KeyError:
-                raise TypeError("Type {} is not supported for HDF5 output"
-                                .format(ty))
+            if ty is str:
+                ty_h5 = "S{}".format(len(data))
+                data = data.encode()
+            else:
+                try:
+                    ty_h5 = _type_to_hdf5[ty]
+                except KeyError:
+                    raise TypeError("Type {} is not supported for HDF5 output"
+                                    .format(ty)) from None
             dataset = f.create_dataset(name, (), ty_h5)
             dataset[()] = data

+        if flag is not None:
+            dataset.attrs[flag] = np.int8(1)
+

 class DatasetManager:
     def __init__(self, ddb):
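A round-trip sketch of the promotion rules (file name arbitrary): Python `bool`/`int` are stored as `np.int8`/`np.int64` and tagged with a `py_bool`/`py_int` attribute so readers can restore the original type:

    import h5py, numpy as np
    with h5py.File("demo.h5", "w") as f:
        result_dict_to_hdf5(f, {"flag": True, "count": 42, "name": "ions"})
    with h5py.File("demo.h5", "r") as f:
        assert f["flag"][()] == 1 and f["flag"].attrs["py_bool"] == 1
        assert f["count"][()] == 42 and f["count"].attrs["py_int"] == 1
        assert f["name"][()] == b"ions"   # str lands as fixed-length bytes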
@@ -168,7 +190,8 @@ class DatasetManager:
         try:
             return self.local[key]
         except KeyError:
-            return self.ddb.get(key)
+            pass
+        return self.ddb.get(key)

     def write_hdf5(self, f):
         result_dict_to_hdf5(f, self.local)
@@ -1,7 +1,7 @@
 import sys
 import time
 import os
-import traceback
+import logging

 from artiq.protocols import pyon
 from artiq.tools import file_import
|
@ -26,12 +26,9 @@ class ParentActionError(Exception):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
def make_parent_action(action, argnames, exception=ParentActionError):
|
def make_parent_action(action, exception=ParentActionError):
|
||||||
argnames = argnames.split()
|
def parent_action(*args, **kwargs):
|
||||||
def parent_action(*args):
|
request = {"action": action, "args": args, "kwargs": kwargs}
|
||||||
request = {"action": action}
|
|
||||||
for argname, arg in zip(argnames, args):
|
|
||||||
request[argname] = arg
|
|
||||||
put_object(request)
|
put_object(request)
|
||||||
reply = get_object()
|
reply = get_object()
|
||||||
if "action" in reply:
|
if "action" in reply:
|
||||||
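The request sent down the worker pipe now carries positional and keyword arguments verbatim instead of zipping them against a declared name string; e.g. for the `get_device` action defined below:

    get_device = make_parent_action("get_device", KeyError)
    get_device("ttl0")   # the device key is illustrative
    # -> put_object({"action": "get_device", "args": ("ttl0",), "kwargs": {}})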
|
@ -50,7 +47,7 @@ class LogForwarder:
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.buffer = ""
|
self.buffer = ""
|
||||||
|
|
||||||
to_parent = staticmethod(make_parent_action("log", "message"))
|
to_parent = staticmethod(make_parent_action("log"))
|
||||||
|
|
||||||
def write(self, data):
|
def write(self, data):
|
||||||
self.buffer += data
|
self.buffer += data
|
||||||
|
@@ -64,18 +61,18 @@ class LogForwarder:


 class ParentDeviceDB:
-    get_device_db = make_parent_action("get_device_db", "")
-    get = make_parent_action("get_device", "key", KeyError)
+    get_device_db = make_parent_action("get_device_db")
+    get = make_parent_action("get_device", KeyError)


 class ParentDatasetDB:
-    get = make_parent_action("get_dataset", "key", KeyError)
-    update = make_parent_action("update_dataset", "mod")
+    get = make_parent_action("get_dataset", KeyError)
+    update = make_parent_action("update_dataset")


 class Watchdog:
-    _create = make_parent_action("create_watchdog", "t")
-    _delete = make_parent_action("delete_watchdog", "wid")
+    _create = make_parent_action("create_watchdog")
+    _delete = make_parent_action("delete_watchdog")

     def __init__(self, t):
         self.t = t
|
||||||
|
|
||||||
|
|
||||||
class Scheduler:
|
class Scheduler:
|
||||||
pause_noexc = staticmethod(make_parent_action("pause", ""))
|
pause_noexc = staticmethod(make_parent_action("pause"))
|
||||||
|
|
||||||
def pause(self):
|
def pause(self):
|
||||||
if self.pause_noexc():
|
if self.pause_noexc():
|
||||||
raise TerminationRequested
|
raise TerminationRequested
|
||||||
|
|
||||||
submit = staticmethod(make_parent_action("scheduler_submit",
|
submit = staticmethod(make_parent_action("scheduler_submit"))
|
||||||
"pipeline_name expid priority due_date flush"))
|
cancel = staticmethod(make_parent_action("scheduler_cancel"))
|
||||||
cancel = staticmethod(make_parent_action("scheduler_cancel", "rid"))
|
|
||||||
|
|
||||||
def set_run_info(self, pipeline_name, expid, priority):
|
def set_run_info(self, pipeline_name, expid, priority):
|
||||||
self.pipeline_name = pipeline_name
|
self.pipeline_name = pipeline_name
|
||||||
|
@ -120,22 +116,21 @@ def get_exp(file, class_name):
|
||||||
return getattr(module, class_name)
|
return getattr(module, class_name)
|
||||||
|
|
||||||
|
|
||||||
register_experiment = make_parent_action("register_experiment",
|
register_experiment = make_parent_action("register_experiment")
|
||||||
"class_name name arguments")
|
|
||||||
|
|
||||||
|
|
||||||
class ExamineDeviceMgr:
|
class ExamineDeviceMgr:
|
||||||
get_device_db = make_parent_action("get_device_db", "")
|
get_device_db = make_parent_action("get_device_db")
|
||||||
|
|
||||||
def get(self, name):
|
def get(name):
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
class DummyDatasetMgr:
|
class DummyDatasetMgr:
|
||||||
def set(self, key, value, broadcast=False, persist=False, save=True):
|
def set(key, value, broadcast=False, persist=False, save=True):
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def get(self, key):
|
def get(key):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
@ -158,7 +153,9 @@ def examine(device_mgr, dataset_mgr, file):
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
sys.stdout = sys.stderr = LogForwarder()
|
sys.stdout = LogForwarder()
|
||||||
|
sys.stderr = LogForwarder()
|
||||||
|
logging.basicConfig(level=int(sys.argv[1]))
|
||||||
|
|
||||||
start_time = None
|
start_time = None
|
||||||
rid = None
|
rid = None
|
||||||
|
@ -211,15 +208,15 @@ def main():
|
||||||
f.close()
|
f.close()
|
||||||
put_object({"action": "completed"})
|
put_object({"action": "completed"})
|
||||||
elif action == "examine":
|
elif action == "examine":
|
||||||
examine(ExamineDeviceMgr(), DummyDatasetMgr(), obj["file"])
|
examine(ExamineDeviceMgr, DummyDatasetMgr, obj["file"])
|
||||||
put_object({"action": "completed"})
|
put_object({"action": "completed"})
|
||||||
elif action == "terminate":
|
elif action == "terminate":
|
||||||
break
|
break
|
||||||
except:
|
except:
|
||||||
traceback.print_exc()
|
logging.error("Worker terminating with exception", exc_info=True)
|
||||||
put_object({"action": "exception"})
|
|
||||||
finally:
|
finally:
|
||||||
device_mgr.close_devices()
|
device_mgr.close_devices()
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
main()
|
main()
|
||||||
|
|
|
@@ -0,0 +1,133 @@
+import asyncio
+import logging
+
+from artiq.protocols.asyncio_server import AsyncioServer
+from artiq.tools import TaskObject, workaround_asyncio263
+
+
+logger = logging.getLogger(__name__)
+_fwd_logger = logging.getLogger("fwd")
+
+
+def log_with_name(name, *args, **kwargs):
+    _fwd_logger.name = name
+    _fwd_logger.log(*args, **kwargs)
+
+
+_name_to_level = {
+    "CRITICAL": logging.CRITICAL,
+    "ERROR": logging.ERROR,
+    "WARN": logging.WARNING,
+    "WARNING": logging.WARNING,
+    "INFO": logging.INFO,
+    "DEBUG": logging.DEBUG,
+}
+
+
+def parse_log_message(msg):
+    for name, level in _name_to_level.items():
+        if msg.startswith(name + ":"):
+            remainder = msg[len(name) + 1:]
+            try:
+                idx = remainder.index(":")
+            except:
+                continue
+            return level, remainder[:idx], remainder[idx+1:]
+    return logging.INFO, "print", msg
+
+
+_init_string = b"ARTIQ logging\n"
+
+
+class Server(AsyncioServer):
+    """Remote logging TCP server.
+
+    Takes one log entry per line, in the format:
+        source:levelno:name:message
+    """
+    async def _handle_connection_cr(self, reader, writer):
+        try:
+            line = await reader.readline()
+            if line != _init_string:
+                return
+
+            while True:
+                line = await reader.readline()
+                if not line:
+                    break
+                try:
+                    line = line.decode()
+                except:
+                    return
+                line = line[:-1]
+                linesplit = line.split(":", 3)
+                if len(linesplit) != 4:
+                    logger.warning("received improperly formatted message, "
+                                   "dropping connection")
+                    return
+                source, level, name, message = linesplit
+                try:
+                    level = int(level)
+                except:
+                    logger.warning("received improperly formatted level, "
+                                   "dropping connection")
+                    return
+                log_with_name(name, level, message,
+                              extra={"source": source})
+        finally:
+            writer.close()
+
+
+class SourceFilter:
+    def __init__(self, local_level, local_source):
+        self.local_level = local_level
+        self.local_source = local_source
+
+    def filter(self, record):
+        if not hasattr(record, "source"):
+            record.source = self.local_source
+        if record.source == self.local_source:
+            return record.levelno >= self.local_level
+        else:
+            # log messages that are forwarded from a source have already
+            # been filtered, and may have a level below the local level.
+            return True
+
+
+class LogForwarder(logging.Handler, TaskObject):
+    def __init__(self, host, port, reconnect_timer=5.0, queue_size=1000,
+                 **kwargs):
+        logging.Handler.__init__(self, **kwargs)
+        self.host = host
+        self.port = port
+        self.setFormatter(logging.Formatter(
+            "%(name)s:%(message)s"))
+        self._queue = asyncio.Queue(queue_size)
+        self.reconnect_timer = reconnect_timer
+
+    def emit(self, record):
+        message = self.format(record)
+        for part in message.split("\n"):
+            part = "{}:{}:{}".format(record.source, record.levelno, part)
+            try:
+                self._queue.put_nowait(part)
+            except asyncio.QueueFull:
+                break
+
+    async def _do(self):
+        while True:
+            try:
+                reader, writer = await asyncio.open_connection(self.host,
+                                                               self.port)
+                writer.write(_init_string)
+                while True:
+                    message = await self._queue.get() + "\n"
+                    writer.write(message.encode())
+                    await workaround_asyncio263()
+                    await writer.drain()
+            except asyncio.CancelledError:
+                return
+            except:
+                await asyncio.sleep(self.reconnect_timer)
+            finally:
+                writer.close()
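Behavior sketch of the parsing helper (messages invented):

    parse_log_message("WARNING:worker(5):ack timeout")
    # -> (30, "worker(5)", "ack timeout")
    parse_log_message("plain print() output")
    # -> (20, "print", "plain print() output")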
@@ -27,6 +27,12 @@ from artiq.protocols.asyncio_server import AsyncioServer as _AsyncioServer
 logger = logging.getLogger(__name__)


+class AutoTarget:
+    """Use this as target value in clients for them to automatically connect
+    to the target exposed by the server. Servers must have only one target."""
+    pass
+
+
 class RemoteError(Exception):
     """Raised when a RPC failed or raised an exception on the remote (server)
     side."""
@@ -42,6 +48,20 @@ class IncompatibleServer(Exception):
 _init_string = b"ARTIQ pc_rpc\n"


+def _validate_target_name(target_name, target_names):
+    if target_name is AutoTarget:
+        if len(target_names) > 1:
+            raise ValueError("Server has multiple targets: " +
+                             " ".join(sorted(target_names)))
+        else:
+            target_name = target_names[0]
+    elif target_name not in target_names:
+        raise IncompatibleServer(
+            "valid target name(s): " +
+            " ".join(sorted(target_names)))
+    return target_name
+
+
 class Client:
     """This class proxies the methods available on the server so that they
     can be used as if they were local methods.
@@ -67,11 +87,13 @@ class Client:
     :param port: TCP port to use.
     :param target_name: Target name to select. ``IncompatibleServer`` is
         raised if the target does not exist.
+        Use ``AutoTarget`` for automatic selection if the server has only one
+        target.
         Use ``None`` to skip selecting a target. The list of targets can then
         be retrieved using ``get_rpc_id`` and then one can be selected later
         using ``select_rpc_target``.
     """
-    def __init__(self, host, port, target_name):
+    def __init__(self, host, port, target_name=AutoTarget):
         self.__socket = socket.create_connection((host, port))

         try:
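With the new default, a client of a single-target server no longer needs to spell the name out (host, port and target name are illustrative):

    remote = pc_rpc.Client("::1", 3251)         # target_name=AutoTarget
    remote = pc_rpc.Client("::1", 3251, "lda")  # explicit name still works
    remote = pc_rpc.Client("::1", 3251, None)   # defer; select_rpc_target later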
@@ -89,8 +111,7 @@ class Client:
     def select_rpc_target(self, target_name):
         """Selects a RPC target by name. This function should be called
         exactly once if the object was created with ``target_name=None``."""
-        if target_name not in self.__target_names:
-            raise IncompatibleServer
+        target_name = _validate_target_name(target_name, self.__target_names)
         self.__socket.sendall((target_name + "\n").encode())

     def get_rpc_id(self):
@@ -180,8 +201,7 @@ class AsyncioClient:
         """Selects a RPC target by name. This function should be called
         exactly once if the connection was created with ``target_name=None``.
         """
-        if target_name not in self.__target_names:
-            raise IncompatibleServer
+        target_name = _validate_target_name(target_name, self.__target_names)
         self.__writer.write((target_name + "\n").encode())

     def get_rpc_id(self):
@@ -259,7 +279,8 @@ class BestEffortClient:
         except:
             logger.warning("first connection attempt to %s:%d[%s] failed, "
                            "retrying in the background",
-                           self.__host, self.__port, self.__target_name)
+                           self.__host, self.__port, self.__target_name,
+                           exc_info=True)
             self.__start_conretry()
         else:
             self.__conretry_thread = None
@@ -273,9 +294,9 @@ class BestEffortClient:
             (self.__host, self.__port), timeout)
         self.__socket.sendall(_init_string)
         server_identification = self.__recv()
-        if self.__target_name not in server_identification["targets"]:
-            raise IncompatibleServer
-        self.__socket.sendall((self.__target_name + "\n").encode())
+        target_name = _validate_target_name(self.__target_name,
+                                            server_identification["targets"])
+        self.__socket.sendall((target_name + "\n").encode())

     def __start_conretry(self):
         self.__conretry_thread = threading.Thread(target=self.__conretry)
@@ -132,7 +132,10 @@ class _Encoder:
         return r

     def encode(self, x):
-        return getattr(self, "encode_" + _encode_map[type(x)])(x)
+        ty = _encode_map.get(type(x), None)
+        if ty is None:
+            raise TypeError(repr(x) + " is not PYON serializable")
+        return getattr(self, "encode_" + ty)(x)


 def encode(x, pretty=False):
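With the guarded lookup, unsupported types now fail fast with a readable message instead of a bare KeyError escaping the dispatch table:

    pyon.encode({"a": [1, 2.5, None]})   # fine
    pyon.encode(object())
    # TypeError: <object object at 0x...> is not PYON serializable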
@@ -145,6 +148,7 @@ def encode(x, pretty=False):

 def _nparray(shape, dtype, data):
     a = numpy.frombuffer(base64.b64decode(data), dtype=dtype)
+    a = a.copy()
     return a.reshape(shape)
@@ -16,6 +16,7 @@ from functools import partial

 from artiq.protocols import pyon
 from artiq.protocols.asyncio_server import AsyncioServer
+from artiq.tools import workaround_asyncio263


 _init_string = b"ARTIQ sync_struct\n"

@@ -233,10 +234,11 @@ class Publisher(AsyncioServer):
                     line = await queue.get()
                     writer.write(line)
                     # raise exception on connection error
+                    await workaround_asyncio263()
                     await writer.drain()
             finally:
                 self._recipients[notifier_name].remove(queue)
-        except ConnectionResetError:
+        except (ConnectionResetError, BrokenPipeError):
             # subscribers disconnecting are a normal occurrence
             pass
         finally:
@@ -0,0 +1,25 @@
+import unittest
+
+import h5py
+import numpy as np
+
+from artiq.master.worker_db import result_dict_to_hdf5
+
+
+class TypesCase(unittest.TestCase):
+    def test_types(self):
+        d = {
+            "bool": True,
+            "int": 42,
+            "float": 42.0,
+            "string": "abcdef",
+        }
+
+        for size in 8, 16, 32, 64:
+            d["i"+str(size)] = getattr(np, "int" + str(size))(42)
+            d["u"+str(size)] = getattr(np, "uint" + str(size))(42)
+        for size in 16, 32, 64:
+            d["f"+str(size)] = getattr(np, "float" + str(size))(42)
+
+        with h5py.File("h5types.h5", "w") as f:
+            result_dict_to_hdf5(f, d)
@@ -17,12 +17,12 @@ test_object = [5, 2.1, None, True, False,


 class RPCCase(unittest.TestCase):
-    def _run_server_and_test(self, test):
+    def _run_server_and_test(self, test, *args):
         # running this file outside of unittest starts the echo server
         with subprocess.Popen([sys.executable,
                                sys.modules[__name__].__file__]) as proc:
             try:
-                test()
+                test(*args)
             finally:
                 try:
                     proc.wait(timeout=1)

@@ -30,12 +30,12 @@ class RPCCase(unittest.TestCase):
                     proc.kill()
                     raise

-    def _blocking_echo(self):
+    def _blocking_echo(self, target):
         for attempt in range(100):
             time.sleep(.2)
             try:
                 remote = pc_rpc.Client(test_address, test_port,
-                                       "test")
+                                       target)
             except ConnectionRefusedError:
                 pass
             else:

@@ -50,14 +50,17 @@ class RPCCase(unittest.TestCase):
             remote.close_rpc()

     def test_blocking_echo(self):
-        self._run_server_and_test(self._blocking_echo)
+        self._run_server_and_test(self._blocking_echo, "test")

-    async def _asyncio_echo(self):
+    def test_blocking_echo_autotarget(self):
+        self._run_server_and_test(self._blocking_echo, pc_rpc.AutoTarget)
+
+    async def _asyncio_echo(self, target):
         remote = pc_rpc.AsyncioClient()
         for attempt in range(100):
             await asyncio.sleep(.2)
             try:
-                await remote.connect_rpc(test_address, test_port, "test")
+                await remote.connect_rpc(test_address, test_port, target)
             except ConnectionRefusedError:
                 pass
             else:

@@ -71,16 +74,19 @@ class RPCCase(unittest.TestCase):
         finally:
             remote.close_rpc()

-    def _loop_asyncio_echo(self):
+    def _loop_asyncio_echo(self, target):
         loop = asyncio.new_event_loop()
         asyncio.set_event_loop(loop)
         try:
-            loop.run_until_complete(self._asyncio_echo())
+            loop.run_until_complete(self._asyncio_echo(target))
         finally:
             loop.close()

     def test_asyncio_echo(self):
-        self._run_server_and_test(self._loop_asyncio_echo)
+        self._run_server_and_test(self._loop_asyncio_echo, "test")
+
+    def test_asyncio_echo_autotarget(self):
+        self._run_server_and_test(self._loop_asyncio_echo, pc_rpc.AutoTarget)


 class FireAndForgetCase(unittest.TestCase):
@@ -1,4 +1,5 @@
 import unittest
+import logging
 import asyncio
 import sys
 import os

@@ -32,6 +33,7 @@ class BackgroundExperiment(EnvExperiment):

 def _get_expid(name):
     return {
+        "log_level": logging.WARNING,
         "file": sys.modules[__name__].__file__,
         "class_name": name,
         "arguments": dict()
@@ -17,15 +17,17 @@ def write_test_data(test_dict):
     for key, value in enumerate(test_values):
         test_dict[key] = value
     test_dict[1.5] = 1.5
-    test_dict["array"] = []
-    test_dict["array"].append(42)
-    test_dict["array"].insert(1, 1)
+    test_dict["list"] = []
+    test_dict["list"].append(42)
+    test_dict["list"].insert(1, 1)
     test_dict[100] = 0
     test_dict[100] = 1
     test_dict[101] = 1
     test_dict.pop(101)
     test_dict[102] = 1
     del test_dict[102]
+    test_dict["array"] = np.zeros(1)
+    test_dict["array"][0] = 10
     test_dict["finished"] = True
@@ -1,4 +1,5 @@
 import unittest
+import logging
 import asyncio
 import sys
 import os

@@ -64,6 +65,7 @@ async def _call_worker(worker, expid):

 def _run_experiment(class_name):
     expid = {
+        "log_level": logging.WARNING,
         "file": sys.modules[__name__].__file__,
         "class_name": class_name,
         "arguments": dict()

@@ -85,7 +87,7 @@ class WorkerCase(unittest.TestCase):
         _run_experiment("SimpleExperiment")

     def test_exception(self):
-        with self.assertRaises(WorkerException):
+        with self.assertRaises(WorkerError):
             _run_experiment("ExceptionTermination")

     def test_watchdog_no_timeout(self):
@@ -49,14 +49,16 @@ def short_format(v):
     if v is None:
         return "None"
     t = type(v)
-    if np.issubdtype(t, int) or np.issubdtype(t, float):
+    if t is bool or np.issubdtype(t, int) or np.issubdtype(t, float):
         return str(v)
     elif t is str:
-        return "\"" + elide(v, 15) + "\""
+        return "\"" + elide(v, 50) + "\""
     else:
         r = t.__name__
         if t is list or t is dict or t is set:
             r += " ({})".format(len(v))
+        if t is np.ndarray:
+            r += " " + str(np.shape(v))
         return r

@@ -175,3 +177,9 @@ class Condition:
         for fut in self._waiters:
             if not fut.done():
                 fut.set_result(False)
+
+
+# See: https://github.com/python/asyncio/issues/263
+@asyncio.coroutine
+def workaround_asyncio263():
+    yield
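Illustrative outputs of the extended `short_format` (values invented); note also that `workaround_asyncio263` simply yields to the event loop once before `drain()` is awaited, sidestepping the linked asyncio bug:

    short_format(True)              # "True"  (bool checked before the int path)
    short_format("x" * 80)          # string now elided at 50 characters
    short_format(np.zeros((2, 3)))  # "ndarray (2, 3)"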
@@ -1,17 +0,0 @@
-Uploading conda packages (Python 3.5)
-=====================================
-
-Preparing:
-
-1. [Install miniconda][miniconda]
-2. `conda update -q conda`
-3. `conda install conda-build jinja2 anaconda`
-4. `conda create -q -n py35 python=3.5`
-5. `conda config --add channels https://conda.anaconda.org/m-labs/channel/dev`
-
-Building:
-
-1. `conda build pkgname --python 3.5`; this command displays a path to the freshly built package
-2. `anaconda upload <package> -c main -c dev`
-
-[miniconda]: http://conda.pydata.org/docs/install/quick.html#linux-miniconda-install
@@ -1,2 +0,0 @@
-"%PYTHON%" setup.py install
-if errorlevel 1 exit 1
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-$PYTHON setup.py install
@@ -1,36 +0,0 @@
-package:
-  name: aiohttp
-  version: "0.17.2"
-
-source:
-  fn: aiohttp-0.17.2.tar.gz
-  url: https://pypi.python.org/packages/source/a/aiohttp/aiohttp-0.17.2.tar.gz
-  md5: 7640928fd4b5c1ccf1f8bcad276d39d6
-
-build:
-  number: 0
-
-requirements:
-  build:
-    - python
-    - setuptools
-    - chardet
-
-  run:
-    - python
-    - chardet
-
-test:
-  # Python imports
-  imports:
-    - aiohttp
-
-  requires:
-    - chardet
-    - gunicorn # [not win]
-    - nose
-
-about:
-  home: https://github.com/KeepSafe/aiohttp/
-  license: Apache Software License
-  summary: 'http client/server for asyncio'
@@ -0,0 +1,26 @@
+#!/bin/bash
+
+BUILD_SETTINGS_FILE=$HOME/.m-labs/build_settings.sh
+[ -f $BUILD_SETTINGS_FILE ] && . $BUILD_SETTINGS_FILE
+
+SOC_PREFIX=$PREFIX/lib/python3.5/site-packages/artiq/binaries/kc705
+mkdir -p $SOC_PREFIX/nist_qc1
+
+SOC_ROOT=$PWD/soc
+
+# build bitstream
+
+(cd $MSCDIR; $PYTHON make.py -X $SOC_ROOT -t artiq_kc705 $MISOC_EXTRA_VIVADO_CMDLINE build-bitstream)
+cp $MSCDIR/build/artiq_kc705-nist_qc1-kc705.bit $SOC_PREFIX/
+wget http://sionneau.net/artiq/binaries/kc705/flash_proxy/bscan_spi_kc705.bit
+mv bscan_spi_kc705.bit $SOC_PREFIX/
+
+# build BIOS
+
+(cd $MSCDIR; $PYTHON make.py -X $SOC_ROOT -t artiq_kc705 build-headers build-bios)
+cp $MSCDIR/software/bios/bios.bin $SOC_PREFIX/
+
+# build runtime
+
+make -C soc/runtime clean runtime.fbi
+cp soc/runtime/runtime.fbi $SOC_PREFIX/nist_qc1/
@@ -0,0 +1,27 @@
+package:
+  name: artiq-kc705-nist_qc1
+  version: {{ environ.get("GIT_DESCRIBE_TAG", "") }}
+
+source:
+  git_url: https://github.com/m-labs/artiq
+  git_tag: master
+
+build:
+  noarch_python: true
+  number: {{ environ.get("GIT_DESCRIBE_NUMBER", 0) }}
+  string: py_{{ environ.get("GIT_DESCRIBE_NUMBER", 0) }}+git{{ environ.get("GIT_DESCRIBE_HASH", "")[1:] }}
+
+requirements:
+  build:
+    # We don't get meaningful GIT_DESCRIBE_* values until before conda installs build dependencies.
+    - artiq 0.0
+    - migen 0.0
+    - llvm-or1k
+    - binutils-or1k-linux
+  run:
+    - artiq 0.0
+
+about:
+  home: http://m-labs.hk/artiq
+  license: GPL
+  summary: 'Bitstream, BIOS and runtime for NIST_QC1 on the KC705 board'
@@ -0,0 +1,26 @@
+#!/bin/bash
+
+BUILD_SETTINGS_FILE=$HOME/.m-labs/build_settings.sh
+[ -f $BUILD_SETTINGS_FILE ] && . $BUILD_SETTINGS_FILE
+
+SOC_PREFIX=$PREFIX/lib/python3.5/site-packages/artiq/binaries/kc705
+mkdir -p $SOC_PREFIX/nist_qc2
+
+SOC_ROOT=$PWD/soc
+
+# build bitstream
+
+(cd $MSCDIR; $PYTHON make.py -X $SOC_ROOT -t artiq_kc705 -s NIST_QC2 $MISOC_EXTRA_VIVADO_CMDLINE build-bitstream)
+cp $MSCDIR/build/artiq_kc705-nist_qc2-kc705.bit $SOC_PREFIX/
+wget http://sionneau.net/artiq/binaries/kc705/flash_proxy/bscan_spi_kc705.bit
+mv bscan_spi_kc705.bit $SOC_PREFIX/
+
+# build BIOS
+
+(cd $MSCDIR; $PYTHON make.py -X $SOC_ROOT -t artiq_kc705 -s NIST_QC2 build-headers build-bios)
+cp $MSCDIR/software/bios/bios.bin $SOC_PREFIX/
+
+# build runtime
+
+make -C soc/runtime clean runtime.fbi
+cp soc/runtime/runtime.fbi $SOC_PREFIX/nist_qc2/
@@ -0,0 +1,27 @@
+package:
+  name: artiq-kc705-nist_qc2
+  version: {{ environ.get("GIT_DESCRIBE_TAG", "") }}
+
+source:
+  git_url: https://github.com/m-labs/artiq
+  git_tag: master
+
+build:
+  noarch_python: true
+  number: {{ environ.get("GIT_DESCRIBE_NUMBER", 0) }}
+  string: py_{{ environ.get("GIT_DESCRIBE_NUMBER", 0) }}+git{{ environ.get("GIT_DESCRIBE_HASH", "")[1:] }}
+
+requirements:
+  build:
+    # We don't get meaningful GIT_DESCRIBE_* values until before conda installs build dependencies.
+    - artiq 0.0
+    - migen 0.0
+    - llvm-or1k
+    - binutils-or1k-linux
+  run:
+    - artiq 0.0
+
+about:
+  home: http://m-labs.hk/artiq
+  license: GPL
+  summary: 'Bitstream, BIOS and runtime for NIST_QC2 on the KC705 board'
@@ -0,0 +1,26 @@
+#!/bin/bash
+
+BUILD_SETTINGS_FILE=$HOME/.m-labs/build_settings.sh
+[ -f $BUILD_SETTINGS_FILE ] && . $BUILD_SETTINGS_FILE
+
+SOC_PREFIX=$PREFIX/lib/python3.5/site-packages/artiq/binaries/pipistrello
+mkdir -p $SOC_PREFIX
+
+SOC_ROOT=$PWD/soc
+
+# build bitstream
+
+(cd $MSCDIR; $PYTHON make.py -X $SOC_ROOT -t artiq_pipistrello $MISOC_EXTRA_ISE_CMDLINE build-bitstream)
+cp $MSCDIR/build/artiq_pipistrello-nist_qc1-pipistrello.bit $SOC_PREFIX/
+wget https://people.phys.ethz.ch/~robertjo/bscan_spi_lx45_csg324.bit
+mv bscan_spi_lx45_csg324.bit $SOC_PREFIX/
+
+# build BIOS
+
+(cd $MSCDIR; $PYTHON make.py -X $SOC_ROOT -t artiq_pipistrello build-headers build-bios)
+cp $MSCDIR/software/bios/bios.bin $SOC_PREFIX/
+
+# build runtime
+
+make -C soc/runtime clean runtime.fbi
+cp soc/runtime/runtime.fbi $SOC_PREFIX/
@@ -0,0 +1,27 @@
+package:
+  name: artiq-pipistrello-nist_qc1
+  version: {{ environ.get("GIT_DESCRIBE_TAG", "") }}
+
+source:
+  git_url: https://github.com/m-labs/artiq
+  git_tag: master
+
+build:
+  noarch_python: true
+  number: {{ environ.get("GIT_DESCRIBE_NUMBER", 0) }}
+  string: py_{{ environ.get("GIT_DESCRIBE_NUMBER", 0) }}+git{{ environ.get("GIT_DESCRIBE_HASH", "")[1:] }}
+
+requirements:
+  build:
+    # We don't get meaningful GIT_DESCRIBE_* values until before conda installs build dependencies.
+    - artiq 0.0
+    - migen 0.0
+    - llvm-or1k
+    - binutils-or1k-linux
+  run:
+    - artiq 0.0
+
+about:
+  home: http://m-labs.hk/artiq
+  license: GPL
+  summary: 'Bitstream, BIOS and runtime for NIST_QC1 on the Pipistrello board'
@@ -1,64 +1,15 @@
 #!/bin/bash
 
-BUILD_SETTINGS_FILE=$HOME/.mlabs/build_settings.sh
-if [ -f $BUILD_SETTINGS_FILE ]
-then
-    source $BUILD_SETTINGS_FILE
-fi
+ARTIQ_PREFIX=$PREFIX/lib/python3.5/site-packages/artiq
 
 $PYTHON setup.py install --single-version-externally-managed --record=record.txt
-git clone --recursive https://github.com/m-labs/misoc
-export MSCDIR=$SRC_DIR/misoc
-
-ARTIQ_PREFIX=$PREFIX/lib/python3.5/site-packages/artiq
-BIN_PREFIX=$ARTIQ_PREFIX/binaries/
-mkdir -p $ARTIQ_PREFIX/misc
-mkdir -p $BIN_PREFIX/kc705 $BIN_PREFIX/pipistrello
-
-# build for KC705 NIST_QC1
-cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_kc705 build-headers build-bios; cd -
-make -C soc/runtime clean runtime.fbi
-[ "$BUILD_SOC" != "0" ] && (cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_kc705 $MISOC_EXTRA_VIVADO_CMDLINE build-bitstream)
-
-# install KC705 NIST_QC1 binaries
-mkdir -p $BIN_PREFIX/kc705/nist_qc1
-cp soc/runtime/runtime.fbi $BIN_PREFIX/kc705/nist_qc1/
-cp $SRC_DIR/misoc/software/bios/bios.bin $BIN_PREFIX/kc705/
-[ "$BUILD_SOC" != "0" ] && cp $SRC_DIR/misoc/build/artiq_kc705-nist_qc1-kc705.bit $BIN_PREFIX/kc705/
-wget http://sionneau.net/artiq/binaries/kc705/flash_proxy/bscan_spi_kc705.bit
-mv bscan_spi_kc705.bit $BIN_PREFIX/kc705/
-
-# build for Pipistrello
-cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_pipistrello build-headers build-bios; cd -
-make -C soc/runtime clean runtime.fbi
-[ "$BUILD_SOC" != "0" ] && (cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_pipistrello $MISOC_EXTRA_ISE_CMDLINE build-bitstream)
-
-# install Pipistrello binaries
-cp soc/runtime/runtime.fbi $BIN_PREFIX/pipistrello/
-cp $SRC_DIR/misoc/software/bios/bios.bin $BIN_PREFIX/pipistrello/
-[ "$BUILD_SOC" != "0" ] && cp $SRC_DIR/misoc/build/artiq_pipistrello-nist_qc1-pipistrello.bit $BIN_PREFIX/pipistrello/
-wget https://people.phys.ethz.ch/~robertjo/bscan_spi_lx45_csg324.bit
-mv bscan_spi_lx45_csg324.bit $BIN_PREFIX/pipistrello/
-
-# build for KC705 NIST_QC2
-cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_kc705 -s NIST_QC2 build-headers; cd -
-make -C soc/runtime clean runtime.fbi
-[ "$BUILD_SOC" != "0" ] && (cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_kc705 -s NIST_QC2 $MISOC_EXTRA_VIVADO_CMDLINE build-bitstream)
-
-# install KC705 NIST_QC2 binaries
-mkdir -p $BIN_PREFIX/kc705/nist_qc2
-cp soc/runtime/runtime.fbi $BIN_PREFIX/kc705/nist_qc2/
-[ "$BUILD_SOC" != "0" ] && cp $SRC_DIR/misoc/build/artiq_kc705-nist_qc2-kc705.bit $BIN_PREFIX/kc705/
 
 cp artiq/frontend/artiq_flash.sh $PREFIX/bin
 
-# misc
+# install udev rules
+mkdir -p $ARTIQ_PREFIX/misc
 cp misc/99-papilio.rules $ARTIQ_PREFIX/misc/
 cp misc/99-kc705.rules $ARTIQ_PREFIX/misc/
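The rewritten build.sh above only ships the udev rules inside the package; activating them on a user machine is left to the user. A plausible post-install step (the file paths are taken from the script; the udevadm invocation is standard Linux administration, not part of this commit):

    sudo cp $ARTIQ_PREFIX/misc/99-papilio.rules /etc/udev/rules.d/
    sudo cp $ARTIQ_PREFIX/misc/99-kc705.rules /etc/udev/rules.d/
    sudo udevadm control --reload-rules   # make udev pick up the new rules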
@@ -1,65 +1,66 @@
 package:
   name: artiq
   version: {{ environ.get("GIT_DESCRIBE_TAG", "") }}
 
 source:
   git_url: https://github.com/m-labs/artiq
   git_tag: master
 
 build:
+  noarch_python: true
   number: {{ environ.get("GIT_DESCRIBE_NUMBER", 0) }}
+  string: py_{{ environ.get("GIT_DESCRIBE_NUMBER", 0) }}+git{{ environ.get("GIT_DESCRIBE_HASH", "")[1:] }}
   entry_points:
     - artiq_client = artiq.frontend.artiq_client:main
     - artiq_compile = artiq.frontend.artiq_compile:main
     - artiq_coretool = artiq.frontend.artiq_coretool:main
     - artiq_ctlmgr = artiq.frontend.artiq_ctlmgr:main
     - artiq_gui = artiq.frontend.artiq_gui:main
     - artiq_influxdb = artiq.frontend.artiq_influxdb:main
     - artiq_master = artiq.frontend.artiq_master:main
     - artiq_mkfs = artiq.frontend.artiq_mkfs:main
     - artiq_rpctool = artiq.frontend.artiq_rpctool:main
     - artiq_run = artiq.frontend.artiq_run:main
     - lda_controller = artiq.frontend.lda_controller:main
     - novatech409b_controller = artiq.frontend.novatech409b_controller:main
     - pdq2_client = artiq.frontend.pdq2_client:main
     - pdq2_controller = artiq.frontend.pdq2_controller:main
     - pxi6733_controller = artiq.frontend.pxi6733_controller:main
     - thorlabs_tcube_controller = artiq.frontend.thorlabs_tcube_controller:main
 
 requirements:
   build:
     - python >=3.5.0
     - setuptools
     - numpy
     - migen 0.0
     - pyelftools
     - binutils-or1k-linux
   run:
     - python >=3.5.0
     - llvmlite-artiq
     - scipy
     - numpy
     - prettytable
     - pyserial
     - sphinx
     - sphinx-argparse
     - h5py
     - dateutil
     - pydaqmx
     - pyelftools
     - quamash
     - pyqtgraph
     - flterm # [linux]
     - pygit2
     - aiohttp
     - binutils-or1k-linux
 
 test:
   imports:
     - artiq
 
 about:
   home: http://m-labs.hk/artiq
-  license: 3-clause BSD
+  license: GPL
   summary: 'ARTIQ (Advanced Real-Time Infrastructure for Quantum physics) is a next-generation control system for quantum information experiments. It is being developed in partnership with the Ion Storage Group at NIST, and its applicability reaches beyond ion trapping.'
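Each entry point declared above becomes a console script on install, wrapping the listed main() function. For example (the --help flag is an assumption about the frontend's argument parser):

    artiq_client --help
    # equivalent direct invocation of the module behind the entry point:
    python3 -c "from artiq.frontend.artiq_client import main; main()"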
@@ -1,8 +0,0 @@
-binutils-or1k-linux
-===================
-
-To build this package on Windows:
-
-* Install cygwin
-* Install the following packages: gcc-core g++-core make texinfo patch
-* Run cygwin terminal and execute $ conda build binutils-or1k-linux
@@ -1,10 +0,0 @@
-FOR /F "tokens=* USEBACKQ" %%F IN (`cygpath -u %PREFIX%`) DO (
-SET var=%%F
-)
-set PREFIX=%var%
-FOR /F "tokens=* USEBACKQ" %%F IN (`cygpath -u %RECIPE_DIR%`) DO (
-SET var=%%F
-)
-set RECIPE_DIR=%var%
-sh %RECIPE_DIR%/build.sh
-if errorlevel 1 exit 1
@@ -1,6 +0,0 @@
-patch -p1 < $RECIPE_DIR/../../misc/binutils-2.25.1-or1k-R_PCREL-pcrel_offset.patch
-mkdir build
-cd build
-../configure --target=or1k-linux --prefix=$PREFIX
-make -j2
-make install
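After `make install`, the cross tools land in $PREFIX/bin under the or1k-linux- prefix implied by --target. A quick smoke test one might run (the tool names follow the usual binutils naming convention; this check is not part of the recipe):

    $PREFIX/bin/or1k-linux-as --version   # cross assembler
    $PREFIX/bin/or1k-linux-ld --version   # cross linker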
@@ -1,20 +0,0 @@
-package:
-  name: binutils-or1k-linux
-  version: 2.25.1
-
-source:
-  fn: binutils-2.25.1.tar.bz2
-  url: https://ftp.gnu.org/gnu/binutils/binutils-2.25.1.tar.bz2
-  sha256: b5b14added7d78a8d1ca70b5cb75fef57ce2197264f4f5835326b0df22ac9f22
-
-build:
-  number: 0
-
-requirements:
-  build:
-    - system # [not win]
-
-about:
-  home: https://www.gnu.org/software/binutils/
-  license: GPL
-  summary: 'A set of programming tools for creating and managing binary programs, object files, libraries, profile data, and assembly source code.'
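The recipe pins the source tarball by sha256, so the download can be verified by hand with the same values (URL and digest copied from the recipe above):

    wget https://ftp.gnu.org/gnu/binutils/binutils-2.25.1.tar.bz2
    echo "b5b14added7d78a8d1ca70b5cb75fef57ce2197264f4f5835326b0df22ac9f22  binutils-2.25.1.tar.bz2" \
        | sha256sum -c -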
@@ -1,2 +0,0 @@
-"%PYTHON%" setup.py install
-if errorlevel 1 exit 1

@@ -1 +0,0 @@
-$PYTHON setup.py install
@@ -1,33 +0,0 @@
-package:
-  name: chardet
-  version: 2.2.1
-
-source:
-  fn: chardet-2.2.1.tar.gz
-  url: https://pypi.python.org/packages/source/c/chardet/chardet-2.2.1.tar.gz
-  md5: 4a758402eaefd0331bdedc7ecb6f452c
-
-build:
-  entry_points:
-    - chardetect = chardet.chardetect:main
-  number: 0
-
-requirements:
-  build:
-    - python
-    - setuptools
-
-  run:
-    - python
-
-test:
-  # Python imports
-  imports:
-    - chardet
-
-  commands:
-    - chardetect run_test.py
-
-about:
-  home: https://github.com/chardet/chardet
-  license: GNU Library or Lesser General Public License (LGPL)
@@ -1 +0,0 @@
-%PYTHON% setup.py install

@@ -1 +0,0 @@
-$PYTHON setup.py install
@@ -1,30 +0,0 @@
-package:
-  name: dateutil
-  version: 2.4.2
-
-source:
-  fn: python-dateutil-2.4.2.tar.gz
-  url: https://pypi.python.org/packages/source/p/python-dateutil/python-dateutil-2.4.2.tar.gz
-  md5: 4ef68e1c485b09e9f034e10473e5add2
-
-build:
-  number: 0
-
-requirements:
-  build:
-    - python
-    - setuptools
-    - six >=1.5
-  run:
-    - python
-    - six >=1.5
-
-test:
-  imports:
-    - dateutil
-    - dateutil.zoneinfo
-
-about:
-  home: https://dateutil.readthedocs.org
-  license: BSD
-  summary: 'Extensions to the standard Python datetime module'
@@ -1,5 +0,0 @@
-#!/bin/bash
-
-make -C $SRC_DIR/tools flterm
-mkdir -p $PREFIX/bin
-cp $SRC_DIR/tools/flterm $PREFIX/bin/
@@ -1,12 +0,0 @@
-package:
-  name: flterm
-  version: 0
-
-source:
-  git_url: https://github.com/m-labs/misoc
-  git_tag: master
-
-about:
-  home: https://github.com/m-labs/misoc/blob/master/tools/flterm.c
-  license: 3-clause BSD
-  summary: 'Serial terminal to connect to MiSoC uart.'
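For context, flterm is the serial console used to talk to a MiSoC UART. A typical invocation might look as follows (the flag names are recalled from the MiSoC tool and may differ between versions; check flterm's help output):

    flterm --port /dev/ttyUSB1                        # attach to the board's UART
    flterm --port /dev/ttyUSB1 --kernel runtime.bin   # also serve a boot image over serial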
@@ -1,20 +0,0 @@
-mkdir build
-cd build
-REM Configure step
-if "%ARCH%"=="32" (
-  set CMAKE_GENERATOR=Visual Studio 12 2013
-) else (
-  set CMAKE_GENERATOR=Visual Studio 12 2013 Win64
-)
-set CMAKE_GENERATOR_TOOLSET=v120_xp
-cmake -G "%CMAKE_GENERATOR%" -DCMAKE_INSTALL_PREFIX=%PREFIX% -DSTDCALL=OFF -DCMAKE_PREFIX_PATH=%PREFIX% %SRC_DIR%
-if errorlevel 1 exit 1
-REM Build step
-cmake --build .
-if errorlevel 1 exit 1
-REM Install step
-cmake --build . --target install
-if errorlevel 1 exit 1
-REM Hack to help pygit2 to find libgit2
-mkdir %PREFIX%\Scripts
-copy "%PREFIX%\bin\git2.dll" "%PREFIX%\Scripts\"
@@ -1,7 +0,0 @@
-#!/bin/bash
-
-mkdir build
-cd build
-cmake .. -DCMAKE_INSTALL_PREFIX=$PREFIX -DCMAKE_PREFIX_PATH=$PREFIX
-make -j2
-make install
@@ -1,27 +0,0 @@
-package:
-  name: libgit2
-  version: 0.22.3
-
-source:
-  git_url: https://github.com/libgit2/libgit2
-  git_tag: v0.22.3
-
-build:
-  number: 1
-
-requirements:
-  build:
-    - system # [linux]
-    - cmake # [linux]
-    - openssl
-    - libssh2
-    - zlib
-  run:
-    - openssl
-    - zlib
-    - libssh2
-
-about:
-  home: https://libgit2.github.com/
-  license: GPLv2 with a special Linking Exception
-  summary: 'libgit2 is a portable, pure C implementation of the Git core methods provided as a re-entrant linkable library with a solid API, allowing you to write native speed custom Git applications in any language with bindings.'
@@ -1,17 +0,0 @@
-mkdir build
-cd build
-REM Configure step
-if "%ARCH%"=="32" (
-  set CMAKE_GENERATOR=Visual Studio 12 2013
-) else (
-  set CMAKE_GENERATOR=Visual Studio 12 2013 Win64
-)
-set CMAKE_GENERATOR_TOOLSET=v120_xp
-cmake -G "%CMAKE_GENERATOR%" -DCMAKE_INSTALL_PREFIX=%PREFIX% -DOPENSSL_ROOT_DIR=%PREFIX%\Library -DBUILD_SHARED_LIBS=ON -DBUILD_TESTING=OFF -DBUILD_EXAMPLES=OFF -DCMAKE_PREFIX_PATH=%PREFIX% %SRC_DIR%
-if errorlevel 1 exit 1
-REM Build step
-cmake --build .
-if errorlevel 1 exit 1
-REM Install step
-cmake --build . --target install
-if errorlevel 1 exit 1
@@ -1,7 +0,0 @@
-#!/bin/bash
-
-mkdir build
-cd build
-cmake .. -DCMAKE_INSTALL_PREFIX=$PREFIX -DOPENSSL_ROOT_DIR=$PREFIX -DBUILD_SHARED_LIBS=ON -DBUILD_TESTING=OFF -DBUILD_EXAMPLES=OFF -DCMAKE_PREFIX_PATH=$PREFIX
-make -j2
-make install
@@ -1,23 +0,0 @@
-package:
-  name: libssh2
-  version: 1.6.0
-
-source:
-  git_url: https://github.com/libssh2/libssh2
-  git_tag: libssh2-1.6.0
-
-build:
-  number: 1
-
-requirements:
-  build:
-    - system # [linux]
-    - cmake # [linux]
-    - openssl
-  run:
-    - openssl
-
-about:
-  home: http://www.libssh2.org/
-  license: BSD
-  summary: 'libssh2 is a client-side C library implementing the SSH2 protocol'
@@ -1,2 +0,0 @@
-"%PYTHON%" setup.py install
-if errorlevel 1 exit 1

@@ -1 +0,0 @@
-$PYTHON setup.py install
@@ -1,27 +0,0 @@
-package:
-  name: lit
-  version: 0.4.1
-
-source:
-  fn: lit-0.4.1.tar.gz
-  url: https://pypi.python.org/packages/source/l/lit/lit-0.4.1.tar.gz
-  md5: ea6f00470e1bf7ed9e4edcff0f650fe6
-
-build:
-  number: 0
-
-requirements:
-  build:
-    - python
-    - setuptools
-  run:
-    - python
-
-test:
-  commands:
-    - lit --version
-
-about:
-  home: http://llvm.org/docs/CommandGuide/lit.html
-  license: MIT
@@ -1,25 +0,0 @@
-mkdir build
-cd build
-set BUILD_CONFIG=Release
-REM Configure step
-if "%ARCH%"=="32" (
-  set CMAKE_GENERATOR=Visual Studio 12 2013
-) else (
-  set CMAKE_GENERATOR=Visual Studio 12 2013 Win64
-)
-set CMAKE_GENERATOR_TOOLSET=v120_xp
-@rem Reduce build times and package size by removing unused stuff
-set CMAKE_CUSTOM=-DLLVM_TARGETS_TO_BUILD="OR1K;X86" -DLLVM_INCLUDE_TESTS=OFF ^
-    -DLLVM_INCLUDE_TOOLS=OFF -DLLVM_INCLUDE_UTILS=OFF ^
-    -DLLVM_INCLUDE_DOCS=OFF -DLLVM_INCLUDE_EXAMPLES=OFF ^
-    -DLLVM_ENABLE_ASSERTIONS=ON
-cmake -G "%CMAKE_GENERATOR%" -T "%CMAKE_GENERATOR_TOOLSET%" ^
-    -DCMAKE_BUILD_TYPE="%BUILD_CONFIG%" -DCMAKE_PREFIX_PATH=%LIBRARY_PREFIX% ^
-    -DCMAKE_INSTALL_PREFIX:PATH=%LIBRARY_PREFIX% %CMAKE_CUSTOM% %SRC_DIR%
-if errorlevel 1 exit 1
-REM Build step
-cmake --build . --config "%BUILD_CONFIG%"
-if errorlevel 1 exit 1
-REM Install step
-cmake --build . --config "%BUILD_CONFIG%" --target install
-if errorlevel 1 exit 1
@@ -1,10 +0,0 @@
-#!/bin/bash
-
-cd tools
-git clone https://github.com/openrisc/clang-or1k clang
-cd ..
-mkdir build
-cd build
-cmake .. -DCMAKE_INSTALL_PREFIX=$PREFIX -DLLVM_TARGETS_TO_BUILD="OR1K;X86" -DCMAKE_BUILD_TYPE=Release -DLLVM_ENABLE_ASSERTIONS=ON
-make -j2
-make install
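Since the Linux build above compiles LLVM with the OR1K backend enabled, a quick post-install check could confirm the target registered (llc ships with a default LLVM build; this check is not part of the recipe):

    $PREFIX/bin/llc --version | grep -i or1k   # should list the OR1K target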
@@ -1,22 +0,0 @@
-package:
-  name: llvmdev-or1k
-  version: "3.5.0"
-
-source:
-  git_url: https://github.com/openrisc/llvm-or1k
-  git_tag: master
-
-build:
-  number: 5
-
-requirements:
-  build:
-    - system # [linux]
-    - cmake # [linux]
-  run:
-    - system # [linux]
-
-about:
-  home: http://llvm.org/
-  license: Open Source (http://llvm.org/releases/3.5.0/LICENSE.TXT)
-  summary: Development headers and libraries for LLVM
@@ -1,8 +0,0 @@
-@rem Let CMake know about the LLVM install path, for find_package()
-set CMAKE_PREFIX_PATH=%LIBRARY_PREFIX%
-
-@rem Ensure there are no build leftovers (CMake can complain)
-if exist ffi\build rmdir /S /Q ffi\build
-
-%PYTHON% setup.py install
-if errorlevel 1 exit 1

@@ -1,3 +0,0 @@
-#!/bin/bash
-
-$PYTHON setup.py install
@@ -1,27 +0,0 @@
-package:
-  name: llvmlite-artiq
-  version: "0.5.1"
-
-source:
-  git_url: https://github.com/m-labs/llvmlite
-  git_tag: artiq
-
-requirements:
-  build:
-    - python
-    - llvmdev-or1k
-    - setuptools
-  run:
-    - python
-
-build:
-  number: 5
-
-test:
-  imports:
-    - llvmlite_artiq
-    - llvmlite_artiq.binding
-
-about:
-  home: https://pypi.python.org/pypi/llvmlite/
-  license: BSD
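The recipe's import test can be reproduced by hand. Assuming the fork keeps llvmlite's usual binding API (the initialize* calls below are carried over from upstream llvmlite and are an assumption about the fork), initializing the native target makes a reasonable smoke test:

    python3 -c "
    import llvmlite_artiq.binding as llvm
    llvm.initialize()                 # upstream llvmlite API, assumed unchanged in the fork
    llvm.initialize_native_target()
    print('llvmlite_artiq OK')
    "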
@@ -1,8 +0,0 @@
-"%PYTHON%" setup.py install
-if errorlevel 1 exit 1
-
-:: Add more build steps here, if they are necessary.
-
-:: See
-:: http://docs.continuum.io/conda/build.html
-:: for a list of environment variables that are set during the build process.

@@ -1,3 +0,0 @@
-#!/bin/bash
-
-$PYTHON setup.py install
@@ -1,26 +0,0 @@
-package:
-  name: prettytable
-  version: !!str 0.7.2
-
-source:
-  fn: prettytable-0.7.2.tar.bz2
-  url: https://pypi.python.org/packages/source/P/PrettyTable/prettytable-0.7.2.tar.bz2
-  md5: 760dc900590ac3c46736167e09fa463a
-
-requirements:
-  build:
-    - python
-    - setuptools
-
-  run:
-    - python
-
-test:
-  imports:
-    - prettytable
-
-about:
-  home: http://code.google.com/p/prettytable/
-  license: BSD License
-  summary: 'A simple Python library for easily displaying tabular data in a visually appealing ASCII table format.'
@@ -1,2 +0,0 @@
-"%PYTHON%" setup.py build
-"%PYTHON%" setup.py install

@@ -1,2 +0,0 @@
-$PYTHON setup.py build
-$PYTHON setup.py install
@@ -1,22 +0,0 @@
-package:
-  name: pydaqmx
-  version: "1.3.1"
-
-source:
-  git_url: https://github.com/clade/pydaqmx
-  git_tag: master
-
-build:
-  number: 0
-
-requirements:
-  build:
-    - python
-    - setuptools
-  run:
-    - python
-
-about:
-  home: http://pythonhosted.org/PyDAQmx/
-  license: BSD
-  summary: 'PyDAQmx allows users to use data acquisition hardware from National Instruments with Python. It provides an interface between the NIDAQmx driver and Python. The package works on Windows and Linux.'
@@ -1 +0,0 @@
-%PYTHON% setup.py install

@@ -1 +0,0 @@
-$PYTHON setup.py install
@@ -1,26 +0,0 @@
-package:
-  name: pyelftools
-  version: 0.23
-
-source:
-  git_url: https://github.com/eliben/pyelftools.git
-  git_tag: v0.23
-
-build:
-  number: 0
-
-requirements:
-  build:
-    - python
-    - setuptools
-  run:
-    - python
-
-test:
-  imports:
-    - elftools
-
-about:
-  home: https://github.com/eliben/pyelftools.git
-  license: Public domain
-  summary: 'Library for analyzing ELF files and DWARF debugging information'
@@ -1,3 +0,0 @@
-set LIBGIT2=%PREFIX%
-set VS100COMNTOOLS=%VS120COMNTOOLS%
-%PYTHON% setup.py install

@@ -1,2 +0,0 @@
-export LIBGIT2=$PREFIX
-$PYTHON setup.py install
@@ -1,28 +0,0 @@
-package:
-  name: pygit2
-  version: 0.22.1
-
-source:
-  git_url: https://github.com/libgit2/pygit2
-  git_tag: v0.22.1
-
-build:
-  number: 1
-
-requirements:
-  build:
-    - system # [linux]
-    - python
-    - libgit2
-    - cffi >=0.8.1
-    - pkgconfig # [linux]
-  run:
-    - system # [linux]
-    - python
-    - libgit2
-    - cffi >=0.8.1
-
-about:
-  home: http://www.pygit2.org/
-  license: GPLv2 with a special Linking Exception
-  summary: 'Pygit2 is a set of Python bindings to the libgit2 shared library; libgit2 implements the core of Git.'
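A quick way to confirm the binding found the libgit2 built earlier is to print the library version it was compiled against (LIBGIT2_VERSION is part of pygit2's public API; the expected value is inferred from the libgit2 recipe above):

    python3 -c "import pygit2; print(pygit2.LIBGIT2_VERSION)"   # expect 0.22.x here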
@@ -1,2 +0,0 @@
-"%PYTHON%" setup.py install
-if errorlevel 1 exit 1

@@ -1,3 +0,0 @@
-#!/bin/bash
-
-$PYTHON setup.py install
@@ -1,27 +0,0 @@
-package:
-  name: pyqtgraph
-  version: 0.9.10.1036edf
-
-source:
-  git_url: https://github.com/pyqtgraph/pyqtgraph.git
-  git_rev: 1036edf
-
-requirements:
-  build:
-    - python
-    - setuptools
-    - numpy
-
-  run:
-    - python
-    - numpy
-    - pyqt >=4.7
-
-test:
-  imports:
-    - pyqtgraph
-
-about:
-  home: http://www.pyqtgraph.org
-  license: MIT License
-  summary: 'Scientific Graphics and GUI Library for Python'
@@ -1,2 +0,0 @@
-pip install regex
-%PYTHON% setup.py install

@@ -1,2 +0,0 @@
-pip install regex
-$PYTHON setup.py install
@@ -1,24 +0,0 @@
-package:
-  name: pythonparser
-  version: 0.0
-
-source:
-  git_url: https://github.com/m-labs/pythonparser
-  git_tag: master
-
-build:
-  number: 0
-
-requirements:
-  build:
-    - python
-    - setuptools
-
-test:
-  imports:
-    - pythonparser
-
-about:
-  home: http://m-labs.hk/pythonparser/
-  license: BSD
-  summary: 'PythonParser is a Python parser written specifically for use in tooling. It parses source code into an AST that is a superset of Python’s built-in ast module, but returns precise location information for every token.'
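As a minimal illustration of the summary above, parsing a snippet and printing the resulting tree might look like this (the parse() entry point is an assumption based on the project's documented usage):

    python3 -c "
    import pythonparser
    tree = pythonparser.parse('x = 1\n')   # returns an AST with precise locations
    print(tree)
    "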
@@ -1 +0,0 @@
-%PYTHON% setup.py install

@@ -1,3 +0,0 @@
-#!/bin/bash
-
-$PYTHON setup.py install