From b548d50a2f099bfa56c67bd36e450636290e40fc Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Wed, 29 Jul 2015 19:42:43 +0800 Subject: [PATCH 001/191] test/coredevice: use ttl_out for PulseRate (loop is less available) --- artiq/test/coredevice.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/artiq/test/coredevice.py b/artiq/test/coredevice.py index e4d9375ba..7ae1dc336 100644 --- a/artiq/test/coredevice.py +++ b/artiq/test/coredevice.py @@ -76,7 +76,7 @@ class ClockGeneratorLoopback(EnvExperiment): class PulseRate(EnvExperiment): def build(self): self.attr_device("core") - self.attr_device("loop_out") + self.attr_device("ttl_out") def set_pulse_rate(self, pulse_rate): self.set_result("pulse_rate", pulse_rate) @@ -87,7 +87,7 @@ class PulseRate(EnvExperiment): while True: try: for i in range(1000): - self.loop_out.pulse_mu(dt) + self.ttl_out.pulse_mu(dt) delay_mu(dt) except RTIOUnderflow: dt += 1 From 1d34c06d797b9e9995f2d7f4a7765a36520313f6 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Wed, 29 Jul 2015 19:43:35 +0800 Subject: [PATCH 002/191] rtio: detect collision errors --- artiq/coredevice/runtime_exceptions.py | 20 +++++++++++-- artiq/gateware/rtio/core.py | 41 +++++++++++++++++++------- artiq/test/coredevice.py | 19 +++++++++++- examples/master/ddb.pyon | 1 + soc/runtime/exceptions.h | 5 ++-- soc/runtime/rtio.c | 22 ++++++++++++++ soc/runtime/rtio.h | 19 +++--------- 7 files changed, 97 insertions(+), 30 deletions(-) diff --git a/artiq/coredevice/runtime_exceptions.py b/artiq/coredevice/runtime_exceptions.py index aa9a2c1c5..e97152e71 100644 --- a/artiq/coredevice/runtime_exceptions.py +++ b/artiq/coredevice/runtime_exceptions.py @@ -41,6 +41,22 @@ class RTIOSequenceError(RuntimeException): return "at {} on channel {}".format(self.p0*self.core.ref_period, self.p1) +class RTIOCollisionError(RuntimeException): + """Raised when an event is submitted on a given channel with the same + coarse timestamp as the previous one but with a different fine timestamp. + + Coarse timestamps correspond to the RTIO system clock (typically around + 125MHz) whereas fine timestamps correspond to the RTIO SERDES clock + (typically around 1GHz). + + The offending event is discarded and the RTIO core keeps operating. + """ + eid = 5 + + def __str__(self): + return "at {} on channel {}".format(self.p0*self.core.ref_period, + self.p1) + class RTIOOverflow(RuntimeException): """Raised when at least one event could not be registered into the RTIO @@ -50,7 +66,7 @@ class RTIOOverflow(RuntimeException): read attempt and discarding some events. Reading can be reattempted after the exception is caught, and events will be partially retrieved. """ - eid = 5 + eid = 6 def __str__(self): return "on channel {}".format(self.p0) @@ -60,7 +76,7 @@ class DDSBatchError(RuntimeException): """Raised when attempting to start a DDS batch while already in a batch, or when too many commands are batched. 
""" - eid = 6 + eid = 7 exception_map = {e.eid: e for e in globals().values() diff --git a/artiq/gateware/rtio/core.py b/artiq/gateware/rtio/core.py index 08c46188f..7a087c7c3 100644 --- a/artiq/gateware/rtio/core.py +++ b/artiq/gateware/rtio/core.py @@ -100,6 +100,7 @@ class _OutputManager(Module): self.underflow = Signal() # valid 1 cycle after we, pulsed self.sequence_error = Signal() + self.collision_error = Signal() # # # @@ -116,13 +117,24 @@ class _OutputManager(Module): # Special cases replace = Signal() sequence_error = Signal() + collision_error = Signal() + any_error = Signal() nop = Signal() self.sync.rsys += [ - replace.eq(self.ev.timestamp[fine_ts_width:] \ - == buf.timestamp[fine_ts_width:]), - sequence_error.eq(self.ev.timestamp[fine_ts_width:] \ + # Note: replace does not perform any RTLink address checks, + # i.e. a write to a different address will be silently replaced + # as well. + replace.eq(self.ev.timestamp == buf.timestamp), + # Detect sequence errors on coarse timestamps only + # so that they are mutually exclusive with collision errors. + sequence_error.eq(self.ev.timestamp[fine_ts_width:] < buf.timestamp[fine_ts_width:]) ] + if fine_ts_width: + self.sync.rsys += collision_error.eq( + (self.ev.timestamp[fine_ts_width:] == buf.timestamp[fine_ts_width:]) + & (self.ev.timestamp[:fine_ts_width] != buf.timestamp[:fine_ts_width])) + self.comb += any_error.eq(sequence_error | collision_error) if interface.suppress_nop: # disable NOP at reset: do not suppress a first write with all 0s nop_en = Signal(reset=0) @@ -134,11 +146,14 @@ class _OutputManager(Module): if hasattr(self.ev, a)], default=0)), # buf now contains valid data. enable NOP. - If(self.we & ~sequence_error, nop_en.eq(1)), + If(self.we & ~any_error, nop_en.eq(1)), # underflows cancel the write. allow it to be retried. 
If(self.underflow, nop_en.eq(0)) ] - self.comb += self.sequence_error.eq(self.we & sequence_error) + self.comb += [ + self.sequence_error.eq(self.we & sequence_error), + self.collision_error.eq(self.we & collision_error) + ] # Buffer read and FIFO write self.comb += fifo.din.eq(buf) @@ -156,7 +171,7 @@ class _OutputManager(Module): fifo.we.eq(1) ) ), - If(self.we & ~replace & ~nop & ~sequence_error, + If(self.we & ~replace & ~nop & ~any_error, fifo.we.eq(1) ) ) @@ -165,7 +180,7 @@ class _OutputManager(Module): # Must come after read to handle concurrent read+write properly self.sync.rsys += [ buf_just_written.eq(0), - If(self.we & ~nop & ~sequence_error, + If(self.we & ~nop & ~any_error, buf_just_written.eq(1), buf_pending.eq(1), buf.eq(self.ev) @@ -286,9 +301,10 @@ class _KernelCSRs(AutoCSR): self.o_address = CSRStorage(address_width) self.o_timestamp = CSRStorage(full_ts_width) self.o_we = CSR() - self.o_status = CSRStatus(3) + self.o_status = CSRStatus(4) self.o_underflow_reset = CSR() self.o_sequence_error_reset = CSR() + self.o_collision_error_reset = CSR() if data_width: self.i_data = CSRStatus(data_width) @@ -369,17 +385,22 @@ class RTIO(Module): underflow = Signal() sequence_error = Signal() + collision_error = Signal() self.sync.rsys += [ If(selected & self.kcsrs.o_underflow_reset.re, underflow.eq(0)), If(selected & self.kcsrs.o_sequence_error_reset.re, sequence_error.eq(0)), + If(selected & self.kcsrs.o_collision_error_reset.re, + collision_error.eq(0)), If(o_manager.underflow, underflow.eq(1)), - If(o_manager.sequence_error, sequence_error.eq(1)) + If(o_manager.sequence_error, sequence_error.eq(1)), + If(o_manager.collision_error, collision_error.eq(1)) ] o_statuses.append(Cat(~o_manager.writable, underflow, - sequence_error)) + sequence_error, + collision_error)) if channel.interface.i is not None: i_manager = _InputManager(channel.interface.i, self.counter, diff --git a/artiq/test/coredevice.py b/artiq/test/coredevice.py index 7ae1dc336..530521a60 100644 --- a/artiq/test/coredevice.py +++ b/artiq/test/coredevice.py @@ -155,6 +155,19 @@ class SequenceError(EnvExperiment): self.ttl_out.pulse(25*us) +class CollisionError(EnvExperiment): + def build(self): + self.attr_device("core") + self.attr_device("ttl_out_serdes") + + @kernel + def run(self): + delay(5*ms) # make sure we won't get underflow + for i in range(16): + self.ttl_out_serdes.pulse_mu(1) + delay_mu(1) + + class TimeKeepsRunning(EnvExperiment): def build(self): self.attr_device("core") @@ -211,7 +224,7 @@ class CoredeviceTest(ExperimentCase): def test_loopback_count(self): npulses = 2 - r = self.execute(LoopbackCount, npulses=npulses) + self.execute(LoopbackCount, npulses=npulses) count = self.rdb.get("count") self.assertEqual(count, npulses) @@ -223,6 +236,10 @@ class CoredeviceTest(ExperimentCase): with self.assertRaises(runtime_exceptions.RTIOSequenceError): self.execute(SequenceError) + def test_collision_error(self): + with self.assertRaises(runtime_exceptions.RTIOCollisionError): + self.execute(CollisionError) + def test_watchdog(self): # watchdog only works on the device with self.assertRaises(IOError): diff --git a/examples/master/ddb.pyon b/examples/master/ddb.pyon index a1ce5680a..3098391bd 100644 --- a/examples/master/ddb.pyon +++ b/examples/master/ddb.pyon @@ -135,6 +135,7 @@ "ttl_inout": "pmt0", "ttl_out": "ttl0", + "ttl_out_serdes": "ttl0", "pmt": "pmt0", "bd_dds": "dds0", diff --git a/soc/runtime/exceptions.h b/soc/runtime/exceptions.h index 1c820c060..ae97b9d95 100644 --- a/soc/runtime/exceptions.h +++ 
b/soc/runtime/exceptions.h @@ -7,8 +7,9 @@ enum { EID_RPC_EXCEPTION = 2, EID_RTIO_UNDERFLOW = 3, EID_RTIO_SEQUENCE_ERROR = 4, - EID_RTIO_OVERFLOW = 5, - EID_DDS_BATCH_ERROR = 6, + EID_RTIO_COLLISION_ERROR = 5, + EID_RTIO_OVERFLOW = 6, + EID_DDS_BATCH_ERROR = 7 }; int exception_setjmp(void *jb) __attribute__((returns_twice)); diff --git a/soc/runtime/rtio.c b/soc/runtime/rtio.c index 3329019ba..004c71a86 100644 --- a/soc/runtime/rtio.c +++ b/soc/runtime/rtio.c @@ -1,5 +1,6 @@ #include +#include "exceptions.h" #include "rtio.h" void rtio_init(void) @@ -14,3 +15,24 @@ long long int rtio_get_counter(void) rtio_counter_update_write(1); return rtio_counter_read(); } + +void rtio_process_exceptional_status(int status, long long int timestamp, int channel) +{ + if(status & RTIO_O_STATUS_FULL) + while(rtio_o_status_read() & RTIO_O_STATUS_FULL); + if(status & RTIO_O_STATUS_UNDERFLOW) { + rtio_o_underflow_reset_write(1); + exception_raise_params(EID_RTIO_UNDERFLOW, + timestamp, channel, rtio_get_counter()); + } + if(status & RTIO_O_STATUS_SEQUENCE_ERROR) { + rtio_o_sequence_error_reset_write(1); + exception_raise_params(EID_RTIO_SEQUENCE_ERROR, + timestamp, channel, 0); + } + if(status & RTIO_O_STATUS_COLLISION_ERROR) { + rtio_o_collision_error_reset_write(1); + exception_raise_params(EID_RTIO_COLLISION_ERROR, + timestamp, channel, 0); + } +} diff --git a/soc/runtime/rtio.h b/soc/runtime/rtio.h index be1591554..566f18ead 100644 --- a/soc/runtime/rtio.h +++ b/soc/runtime/rtio.h @@ -2,16 +2,17 @@ #define __RTIO_H #include -#include "exceptions.h" #define RTIO_O_STATUS_FULL 1 #define RTIO_O_STATUS_UNDERFLOW 2 #define RTIO_O_STATUS_SEQUENCE_ERROR 4 +#define RTIO_O_STATUS_COLLISION_ERROR 8 #define RTIO_I_STATUS_EMPTY 1 #define RTIO_I_STATUS_OVERFLOW 2 void rtio_init(void); long long int rtio_get_counter(void); +void rtio_process_exceptional_status(int status, long long int timestamp, int channel); static inline void rtio_write_and_process_status(long long int timestamp, int channel) { @@ -19,20 +20,8 @@ static inline void rtio_write_and_process_status(long long int timestamp, int ch rtio_o_we_write(1); status = rtio_o_status_read(); - if(status) { - if(status & RTIO_O_STATUS_FULL) - while(rtio_o_status_read() & RTIO_O_STATUS_FULL); - if(status & RTIO_O_STATUS_UNDERFLOW) { - rtio_o_underflow_reset_write(1); - exception_raise_params(EID_RTIO_UNDERFLOW, - timestamp, channel, rtio_get_counter()); - } - if(status & RTIO_O_STATUS_SEQUENCE_ERROR) { - rtio_o_sequence_error_reset_write(1); - exception_raise_params(EID_RTIO_SEQUENCE_ERROR, - timestamp, channel, 0); - } - } + if(status) + rtio_process_exceptional_status(status, timestamp, channel); } #endif /* __RTIO_H */ From a8c13cb7de6bd63adb83a98d678edc10d6bd30e7 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Wed, 29 Jul 2015 23:28:34 +0800 Subject: [PATCH 003/191] gui: fix NumberEntry min/max --- artiq/gui/explorer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/artiq/gui/explorer.py b/artiq/gui/explorer.py index 6e9a81334..5954b87d5 100644 --- a/artiq/gui/explorer.py +++ b/artiq/gui/explorer.py @@ -69,7 +69,7 @@ class _NumberEntry(QtGui.QDoubleSpinBox): if procdesc["min"] is not None: self.setMinimum(procdesc["min"]) if procdesc["max"] is not None: - self.setMinimum(procdesc["max"]) + self.setMaximum(procdesc["max"]) if procdesc["unit"]: self.setSuffix(" " + procdesc["unit"]) if "default" in procdesc: From 86fef7b53b3b198a50f0dc8ae0f10e5c2c4fb52e Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Wed, 29 Jul 2015 
23:29:07 +0800 Subject: [PATCH 004/191] master: do not scan experiments starting with '_' --- artiq/master/worker_impl.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/artiq/master/worker_impl.py b/artiq/master/worker_impl.py index 77ff1349c..c0e10fe45 100644 --- a/artiq/master/worker_impl.py +++ b/artiq/master/worker_impl.py @@ -138,6 +138,8 @@ class DummyPDB: def examine(dmgr, pdb, rdb, file): module = file_import(file) for class_name, exp_class in module.__dict__.items(): + if class_name[0] == "_": + continue if is_experiment(exp_class): if exp_class.__doc__ is None: name = class_name From 1ddb19277f0187d65ee9cbcaa3bb4268ecc98ea0 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Wed, 29 Jul 2015 23:29:26 +0800 Subject: [PATCH 005/191] add speed benchmark --- examples/master/repository/speed_benchmark.py | 142 ++++++++++++++++++ 1 file changed, 142 insertions(+) create mode 100644 examples/master/repository/speed_benchmark.py diff --git a/examples/master/repository/speed_benchmark.py b/examples/master/repository/speed_benchmark.py new file mode 100644 index 000000000..6c188f544 --- /dev/null +++ b/examples/master/repository/speed_benchmark.py @@ -0,0 +1,142 @@ +import time + +from artiq import * + + +class _PayloadNOP(EnvExperiment): + def build(self): + pass + + def run(self): + pass + + +class _PayloadCoreNOP(EnvExperiment): + def build(self): + self.attr_device("core") + + @kernel + def run(self): + pass + + +class _PayloadCoreSend100Ints(EnvExperiment): + def build(self): + self.attr_device("core") + + def devnull(self, d): + pass + + @kernel + def run(self): + for i in range(100): + self.devnull(42) + + +class _PayloadCoreSend1MB(EnvExperiment): + def build(self): + self.attr_device("core") + + def devnull(self, d): + pass + + @kernel + def run(self): + # FIXME: make this a single list + # affected by issue #82 at the moment + data = [0 for _ in range(20000//4)] + for i in range(50): + self.devnull(data) + + +class _PayloadCorePrimes(EnvExperiment): + def build(self): + self.attr_device("core") + + def devnull(self, d): + pass + + @kernel + def run(self): + for x in range(1, 1000): + d = 2 + prime = True + while d*d <= x: + if x % d == 0: + prime = False + break + d += 1 + if prime: + self.devnull(x) + + +class SpeedBenchmark(EnvExperiment): + """Speed benchmark""" + def build(self): + self.attr_argument("mode", EnumerationValue(["Single experiment", + "With pause", + "With scheduler"])) + self.attr_argument("payload", EnumerationValue(["NOP", + "CoreNOP", + "CoreSend100Ints", + "CoreSend1MB", + "CorePrimes"])) + self.attr_argument("nruns", NumberValue(10, min=1, max=1000)) + self.attr_device("core") + self.attr_device("scheduler") + + def run_with_scheduler(self): + nruns = int(self.nruns) + + donop_expid = dict(self.scheduler.expid) + donop_expid["class_name"] = "_Payload" + self.payload + donop_expid["arguments"] = {} + for i in range(nruns): + self.scheduler.submit(self.scheduler.pipeline_name, donop_expid, + self.scheduler.priority, None, False) + + report_expid = dict(self.scheduler.expid) + report_expid["class_name"] = "_Report" + report_expid["arguments"] = { + "start_time": time.monotonic(), + "nruns": nruns} + self.scheduler.submit(self.scheduler.pipeline_name, report_expid, + self.scheduler.priority, None, False) + + def run_without_scheduler(self, pause): + payload = globals()["_Payload" + self.payload](*self.dbs()) + + start_time = time.monotonic() + for i in range(int(self.nruns)): + payload.run() + if pause: + self.core.comm.close() + 
self.scheduler.pause() + end_time = time.monotonic() + + self.set_result("benchmark_run_time", + (end_time-start_time)/self.nruns, + realtime=True) + + + def run(self): + if self.mode == "Single experiment": + self.run_without_scheduler(False) + elif self.mode == "With pause": + self.run_without_scheduler(True) + elif self.mode == "With scheduler": + self.run_with_scheduler() + else: + raise ValueError + + +class _Report(EnvExperiment): + def build(self): + self.attr_argument("start_time") + self.attr_argument("nruns") + + def run(self): + end_time = time.monotonic() + self.set_result("benchmark_run_time", + (end_time-self.start_time)/self.nruns, + realtime=True) From 6b98f867de148c538fe82af55184803dc845dce8 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Wed, 29 Jul 2015 23:32:33 +0800 Subject: [PATCH 006/191] import DDS phase modes at the top level --- artiq/__init__.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/artiq/__init__.py b/artiq/__init__.py index ae6a8f96b..1a98f31af 100644 --- a/artiq/__init__.py +++ b/artiq/__init__.py @@ -1,5 +1,9 @@ from artiq import language from artiq.language import * +from artiq.coredevice.dds import (PHASE_MODE_CONTINUOUS, PHASE_MODE_ABSOLUTE, + PHASE_MODE_TRACKING) __all__ = [] __all__.extend(language.__all__) +__all__ += ["PHASE_MODE_CONTINUOUS", "PHASE_MODE_ABSOLUTE", + "PHASE_MODE_TRACKING"] From 55708e86787c8e058da5cac72667c437fcf764d3 Mon Sep 17 00:00:00 2001 From: Robert Jordens Date: Wed, 29 Jul 2015 11:45:15 -0600 Subject: [PATCH 007/191] pipistrello: drop bitgen_opt change (done upstream) --- soc/targets/artiq_pipistrello.py | 1 - 1 file changed, 1 deletion(-) diff --git a/soc/targets/artiq_pipistrello.py b/soc/targets/artiq_pipistrello.py index 7a16cdcfe..2e0f682e4 100644 --- a/soc/targets/artiq_pipistrello.py +++ b/soc/targets/artiq_pipistrello.py @@ -113,7 +113,6 @@ class NIST_QC1(BaseSoC, AMPSoC): sdram_controller_settings=MiniconSettings(l2_size=64*1024), with_timer=False, **kwargs) AMPSoC.__init__(self) - platform.toolchain.bitgen_opt = "-g Binary:Yes -w" platform.toolchain.ise_commands += """ trce -v 12 -fastpaths -tsi {build_name}.tsi -o {build_name}.twr {build_name}.ncd {build_name}.pcf """ From 4df2001874a3ae74d0c3ec2331fbf96a5dd2c325 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Fri, 31 Jul 2015 13:50:35 +0800 Subject: [PATCH 008/191] travis: try to use the new anaconda-client --- .travis.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.travis.yml b/.travis.yml index 2469dedc2..7f3498cb7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -13,7 +13,7 @@ before_install: - . ./.travis/get-toolchain.sh - . 
./.travis/get-anaconda.sh - source $HOME/miniconda/bin/activate py34 - - conda install -q pip coverage binstar migen cython + - conda install -q pip coverage anaconda-client migen cython - pip install coveralls install: - conda build conda/artiq @@ -22,9 +22,9 @@ script: - coverage run --source=artiq setup.py test - make -C doc/manual html after_success: - - binstar -q login --hostname $(hostname) --username $binstar_login --password $binstar_password - - binstar -q upload --user $binstar_login --channel dev --force $HOME/miniconda/conda-bld/linux-64/artiq-*.tar.bz2 - - binstar -q logout + - anaconda -q login --hostname $(hostname) --username $binstar_login --password $binstar_password + - anaconda -q upload --user $binstar_login --channel dev --force $HOME/miniconda/conda-bld/linux-64/artiq-*.tar.bz2 + - anaconda -q logout - coveralls notifications: email: From f3c38005d3c43847d75dc4debc93f90f764af01e Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Fri, 31 Jul 2015 13:58:28 +0800 Subject: [PATCH 009/191] i hate travis-ci --- conda/artiq/build.sh | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/conda/artiq/build.sh b/conda/artiq/build.sh index c3b7694f0..4a90450a3 100755 --- a/conda/artiq/build.sh +++ b/conda/artiq/build.sh @@ -18,6 +18,12 @@ mkdir -p $BIN_PREFIX/kc705 $BIN_PREFIX/pipistrello # build for KC705 +echo $PYTHONPATH +echo $PREFIX/lib/python3.4/site-packages +ls $PREFIX/lib/python3.4/site-packages +ls $PREFIX/lib/python3.4/site-packages/migen +ls $PREFIX/lib/python3.4/site-packages/mibuild + cd $SRC_DIR/misoc; python make.py -X ../soc -t artiq_kc705 build-headers build-bios; cd - make -C soc/runtime clean runtime.fbi cd $SRC_DIR/misoc; python make.py -X ../soc -t artiq_kc705 $MISOC_EXTRA_VIVADO_CMDLINE build-bitstream; cd - From 53f55a75028384672fdcaffb2c5953fc3bd94f49 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Fri, 31 Jul 2015 14:01:39 +0800 Subject: [PATCH 010/191] try to workaround travis problem --- conda/artiq/build.sh | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/conda/artiq/build.sh b/conda/artiq/build.sh index 4a90450a3..373135c22 100755 --- a/conda/artiq/build.sh +++ b/conda/artiq/build.sh @@ -18,11 +18,7 @@ mkdir -p $BIN_PREFIX/kc705 $BIN_PREFIX/pipistrello # build for KC705 -echo $PYTHONPATH -echo $PREFIX/lib/python3.4/site-packages -ls $PREFIX/lib/python3.4/site-packages -ls $PREFIX/lib/python3.4/site-packages/migen -ls $PREFIX/lib/python3.4/site-packages/mibuild +PYTHONPATH=$PREFIX/lib/python3.4/site-packages cd $SRC_DIR/misoc; python make.py -X ../soc -t artiq_kc705 build-headers build-bios; cd - make -C soc/runtime clean runtime.fbi From 36d92c72dfb945e1c59e4b2b6f97aaea0aa309fa Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Fri, 31 Jul 2015 14:10:14 +0800 Subject: [PATCH 011/191] travis: try export --- conda/artiq/build.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/conda/artiq/build.sh b/conda/artiq/build.sh index 373135c22..e242eeba9 100755 --- a/conda/artiq/build.sh +++ b/conda/artiq/build.sh @@ -18,7 +18,7 @@ mkdir -p $BIN_PREFIX/kc705 $BIN_PREFIX/pipistrello # build for KC705 -PYTHONPATH=$PREFIX/lib/python3.4/site-packages +export PYTHONPATH=$PREFIX/lib/python3.4/site-packages cd $SRC_DIR/misoc; python make.py -X ../soc -t artiq_kc705 build-headers build-bios; cd - make -C soc/runtime clean runtime.fbi From feb2c4d0c4b3baf492675314b031d3522967abec Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Fri, 31 Jul 2015 14:52:15 +0800 Subject: [PATCH 012/191] 
more travis debugging --- conda/artiq/build.sh | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/conda/artiq/build.sh b/conda/artiq/build.sh index e242eeba9..becb18d2c 100755 --- a/conda/artiq/build.sh +++ b/conda/artiq/build.sh @@ -16,9 +16,14 @@ BIN_PREFIX=$ARTIQ_PREFIX/binaries/ mkdir -p $ARTIQ_PREFIX/misc mkdir -p $BIN_PREFIX/kc705 $BIN_PREFIX/pipistrello -# build for KC705 - +# workarounds export PYTHONPATH=$PREFIX/lib/python3.4/site-packages +echo $PATH +echo $PREFIX/lib/python3.4/site-packages +which python +# + +# build for KC705 cd $SRC_DIR/misoc; python make.py -X ../soc -t artiq_kc705 build-headers build-bios; cd - make -C soc/runtime clean runtime.fbi From a118d03ac6357af34fc90492f073b4db5c04267d Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Fri, 31 Jul 2015 14:57:26 +0800 Subject: [PATCH 013/191] even more travis debugging --- conda/artiq/build.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/conda/artiq/build.sh b/conda/artiq/build.sh index becb18d2c..607d77fe6 100755 --- a/conda/artiq/build.sh +++ b/conda/artiq/build.sh @@ -20,7 +20,9 @@ mkdir -p $BIN_PREFIX/kc705 $BIN_PREFIX/pipistrello export PYTHONPATH=$PREFIX/lib/python3.4/site-packages echo $PATH echo $PREFIX/lib/python3.4/site-packages +echo $PYTHON which python +which $PYTHON # # build for KC705 From b4e1d1b07430fbe9b090aee0c4ef755960b65076 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Fri, 31 Jul 2015 15:02:54 +0800 Subject: [PATCH 014/191] conda/artiq: use $PYTHON --- conda/artiq/build.sh | 17 ++++------------- 1 file changed, 4 insertions(+), 13 deletions(-) diff --git a/conda/artiq/build.sh b/conda/artiq/build.sh index 607d77fe6..b818cdf70 100755 --- a/conda/artiq/build.sh +++ b/conda/artiq/build.sh @@ -16,20 +16,11 @@ BIN_PREFIX=$ARTIQ_PREFIX/binaries/ mkdir -p $ARTIQ_PREFIX/misc mkdir -p $BIN_PREFIX/kc705 $BIN_PREFIX/pipistrello -# workarounds -export PYTHONPATH=$PREFIX/lib/python3.4/site-packages -echo $PATH -echo $PREFIX/lib/python3.4/site-packages -echo $PYTHON -which python -which $PYTHON -# - # build for KC705 -cd $SRC_DIR/misoc; python make.py -X ../soc -t artiq_kc705 build-headers build-bios; cd - +cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_kc705 build-headers build-bios; cd - make -C soc/runtime clean runtime.fbi -cd $SRC_DIR/misoc; python make.py -X ../soc -t artiq_kc705 $MISOC_EXTRA_VIVADO_CMDLINE build-bitstream; cd - +cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_kc705 $MISOC_EXTRA_VIVADO_CMDLINE build-bitstream; cd - # install KC705 binaries @@ -41,9 +32,9 @@ mv bscan_spi_kc705.bit $BIN_PREFIX/kc705/ # build for Pipistrello -cd $SRC_DIR/misoc; python make.py -X ../soc -t artiq_pipistrello build-headers build-bios; cd - +cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_pipistrello build-headers build-bios; cd - make -C soc/runtime clean runtime.fbi -cd $SRC_DIR/misoc; python make.py -X ../soc -t artiq_pipistrello $MISOC_EXTRA_ISE_CMDLINE build-bitstream; cd - +cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_pipistrello $MISOC_EXTRA_ISE_CMDLINE build-bitstream; cd - # install Pipistrello binaries From d02d40871e875a182eb7f61233b8aa626c3c8e1f Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Fri, 31 Jul 2015 18:15:16 +0800 Subject: [PATCH 015/191] runtime: update lwip --- soc/runtime/lwip | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/soc/runtime/lwip b/soc/runtime/lwip index d3217718a..e448a9a4a 160000 --- a/soc/runtime/lwip +++ b/soc/runtime/lwip @@ -1 +1 @@ -Subproject commit 
d3217718a904ffd3b6f6cd12d14d526813d46d7a +Subproject commit e448a9a4a976a6bc3091e3f22309ae3e6c089351 From 8d1663394be0e602ac7ebb6fcd62e69d4982cf5f Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Fri, 31 Jul 2015 18:16:02 +0800 Subject: [PATCH 016/191] runtime: increase lwip TCP_SND_QUEUELEN (closes #82) --- soc/runtime/liblwip/lwipopts.h | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/soc/runtime/liblwip/lwipopts.h b/soc/runtime/liblwip/lwipopts.h index eff5a4aae..bca6722a3 100644 --- a/soc/runtime/liblwip/lwipopts.h +++ b/soc/runtime/liblwip/lwipopts.h @@ -117,9 +117,8 @@ a lot of data that needs to be copied, this should be set high. */ /* TCP sender buffer space (bytes). */ #define TCP_SND_BUF 32 * 1024 -/* TCP sender buffer space (pbufs). This must be at least = 2 * - TCP_SND_BUF/TCP_MSS for things to work. */ -#define TCP_SND_QUEUELEN 2 * TCP_SND_BUF/TCP_MSS +/* TCP sender buffer space (pbufs). */ +#define TCP_SND_QUEUELEN 3 * TCP_SND_BUF/TCP_MSS /* TCP receive window. */ #define TCP_WND 16 * 1024 From 33531c2f3bcb24a853e9d9ce6b94c3f6cd14176f Mon Sep 17 00:00:00 2001 From: whitequark Date: Thu, 30 Jul 2015 13:45:57 +0300 Subject: [PATCH 017/191] =?UTF-8?q?Rename=20{kserver=20=E2=86=92=20net=5Fs?= =?UTF-8?q?erver}.{c,h}.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- soc/runtime/Makefile | 2 +- soc/runtime/kserver.h | 7 --- soc/runtime/main.c | 6 +-- soc/runtime/{kserver.c => net_server.c} | 66 ++++++++++++------------- soc/runtime/net_server.h | 7 +++ 5 files changed, 44 insertions(+), 44 deletions(-) delete mode 100644 soc/runtime/kserver.h rename soc/runtime/{kserver.c => net_server.c} (68%) create mode 100644 soc/runtime/net_server.h diff --git a/soc/runtime/Makefile b/soc/runtime/Makefile index 673235928..36fb42b2b 100644 --- a/soc/runtime/Makefile +++ b/soc/runtime/Makefile @@ -1,6 +1,6 @@ include $(MSCDIR)/software/common.mak -OBJECTS := isr.o flash_storage.o clock.o rtiocrg.o elf_loader.o services.o session.o log.o test_mode.o kloader.o bridge_ctl.o mailbox.o ksupport_data.o kserver.o moninj.o main.o +OBJECTS := isr.o flash_storage.o clock.o rtiocrg.o elf_loader.o services.o session.o log.o test_mode.o kloader.o bridge_ctl.o mailbox.o ksupport_data.o net_server.o moninj.o main.o OBJECTS_KSUPPORT := ksupport.o exception_jmp.o exceptions.o mailbox.o bridge.o rtio.o ttl.o dds.o CFLAGS += -Ilwip/src/include -Iliblwip diff --git a/soc/runtime/kserver.h b/soc/runtime/kserver.h deleted file mode 100644 index 28eb79ecc..000000000 --- a/soc/runtime/kserver.h +++ /dev/null @@ -1,7 +0,0 @@ -#ifndef __KSERVER_H -#define __KSERVER_H - -void kserver_init(void); -void kserver_service(void); - -#endif /* __KSERVER_H */ diff --git a/soc/runtime/main.c b/soc/runtime/main.c index 74df38aea..bdd9d4b8c 100644 --- a/soc/runtime/main.c +++ b/soc/runtime/main.c @@ -26,7 +26,7 @@ #include "clock.h" #include "rtiocrg.h" #include "test_mode.h" -#include "kserver.h" +#include "net_server.h" #include "session.h" #include "moninj.h" @@ -138,14 +138,14 @@ static void regular_main(void) { puts("Accepting sessions on Ethernet."); network_init(); - kserver_init(); + net_server_init(); moninj_init(); session_end(); while(1) { lwip_service(); kloader_service_essential_kmsg(); - kserver_service(); + net_server_service(); } } diff --git a/soc/runtime/kserver.c b/soc/runtime/net_server.c similarity index 68% rename from soc/runtime/kserver.c rename to soc/runtime/net_server.c index 4d75da8bc..a28b396bd 100644 --- 
a/soc/runtime/kserver.c +++ b/soc/runtime/net_server.c @@ -14,19 +14,19 @@ #include #include "session.h" -#include "kserver.h" +#include "net_server.h" -struct kserver_connstate { +struct net_server_connstate { int magic_recognized; struct pbuf *rp; int rp_offset; }; -static struct kserver_connstate *cs_new(void) +static struct net_server_connstate *cs_new(void) { - struct kserver_connstate *cs; + struct net_server_connstate *cs; - cs = (struct kserver_connstate *)mem_malloc(sizeof(struct kserver_connstate)); + cs = (struct net_server_connstate *)mem_malloc(sizeof(struct net_server_connstate)); if(!cs) return NULL; cs->magic_recognized = 0; @@ -35,24 +35,24 @@ static struct kserver_connstate *cs_new(void) return cs; } -static void cs_free(struct kserver_connstate *cs) +static void cs_free(struct net_server_connstate *cs) { if(cs->rp) pbuf_free(cs->rp); mem_free(cs); } -static const char kserver_magic[] = "ARTIQ coredev\n"; +static const char net_server_magic[] = "ARTIQ coredev\n"; -static int magic_ok(struct kserver_connstate *cs) +static int magic_ok(struct net_server_connstate *cs) { return cs->magic_recognized >= 14; } -static struct kserver_connstate *active_cs; +static struct net_server_connstate *active_cs; static struct tcp_pcb *active_pcb; -static void kserver_close(struct kserver_connstate *cs, struct tcp_pcb *pcb) +static void net_server_close(struct net_server_connstate *cs, struct tcp_pcb *pcb) { if(cs == active_cs) { session_end(); @@ -70,11 +70,11 @@ static void kserver_close(struct kserver_connstate *cs, struct tcp_pcb *pcb) tcp_close(pcb); } -static err_t kserver_recv(void *arg, struct tcp_pcb *pcb, struct pbuf *p, err_t err) +static err_t net_server_recv(void *arg, struct tcp_pcb *pcb, struct pbuf *p, err_t err) { - struct kserver_connstate *cs; + struct net_server_connstate *cs; - cs = (struct kserver_connstate *)arg; + cs = (struct net_server_connstate *)arg; if(p) { if(cs->rp) pbuf_cat(cs->rp, p); @@ -83,11 +83,11 @@ static err_t kserver_recv(void *arg, struct tcp_pcb *pcb, struct pbuf *p, err_t cs->rp_offset = 0; } } else - kserver_close(cs, pcb); + net_server_close(cs, pcb); return ERR_OK; } -static err_t kserver_sent(void *arg, struct tcp_pcb *pcb, u16_t len) +static err_t net_server_sent(void *arg, struct tcp_pcb *pcb, u16_t len) { session_ack_mem(len); return ERR_OK; @@ -95,13 +95,13 @@ static err_t kserver_sent(void *arg, struct tcp_pcb *pcb, u16_t len) static void tcp_pcb_service(void *arg, struct tcp_pcb *pcb) { - struct kserver_connstate *cs; + struct net_server_connstate *cs; int remaining_in_pbuf; char *rpp; struct pbuf *next; int r; - cs = (struct kserver_connstate *)arg; + cs = (struct net_server_connstate *)arg; while(cs->rp) { remaining_in_pbuf = cs->rp->len - cs->rp_offset; @@ -116,20 +116,20 @@ static void tcp_pcb_service(void *arg, struct tcp_pcb *pcb) } else if(r == 0) return; else - kserver_close(cs, pcb); + net_server_close(cs, pcb); } else { - if(rpp[cs->rp_offset] == kserver_magic[cs->magic_recognized]) { + if(rpp[cs->rp_offset] == net_server_magic[cs->magic_recognized]) { cs->magic_recognized++; if(magic_ok(cs)) { if(active_cs) - kserver_close(active_cs, active_pcb); + net_server_close(active_cs, active_pcb); session_start(); active_cs = cs; active_pcb = pcb; - tcp_sent(pcb, kserver_sent); + tcp_sent(pcb, net_server_sent); } } else { - kserver_close(cs, pcb); + net_server_close(cs, pcb); return; } remaining_in_pbuf--; @@ -150,41 +150,41 @@ static void tcp_pcb_service(void *arg, struct tcp_pcb *pcb) } } -static void kserver_err(void *arg, err_t 
err) +static void net_server_err(void *arg, err_t err) { - struct kserver_connstate *cs; + struct net_server_connstate *cs; - cs = (struct kserver_connstate *)arg; + cs = (struct net_server_connstate *)arg; cs_free(cs); } static struct tcp_pcb *listen_pcb; -static err_t kserver_accept(void *arg, struct tcp_pcb *newpcb, err_t err) +static err_t net_server_accept(void *arg, struct tcp_pcb *newpcb, err_t err) { - struct kserver_connstate *cs; + struct net_server_connstate *cs; cs = cs_new(); if(!cs) return ERR_MEM; tcp_accepted(listen_pcb); tcp_arg(newpcb, cs); - tcp_recv(newpcb, kserver_recv); - tcp_err(newpcb, kserver_err); + tcp_recv(newpcb, net_server_recv); + tcp_err(newpcb, net_server_err); return ERR_OK; } -void kserver_init(void) +void net_server_init(void) { listen_pcb = tcp_new(); tcp_bind(listen_pcb, IP_ADDR_ANY, 1381); listen_pcb = tcp_listen(listen_pcb); - tcp_accept(listen_pcb, kserver_accept); + tcp_accept(listen_pcb, net_server_accept); } extern struct tcp_pcb *tcp_active_pcbs; -void kserver_service(void) +void net_server_service(void) { struct tcp_pcb *pcb; void *data; @@ -208,7 +208,7 @@ void kserver_service(void) session_ack_data(len); } if(len < 0) - kserver_close(active_cs, active_pcb); + net_server_close(active_cs, active_pcb); } } diff --git a/soc/runtime/net_server.h b/soc/runtime/net_server.h new file mode 100644 index 000000000..274f79fe8 --- /dev/null +++ b/soc/runtime/net_server.h @@ -0,0 +1,7 @@ +#ifndef __NET_SERVER_H +#define __NET_SERVER_H + +void net_server_init(void); +void net_server_service(void); + +#endif /* __NET_SERVER_H */ From 89343ae276a3dd3fece96655940d63fb5b47ebd5 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Fri, 31 Jul 2015 18:23:11 +0800 Subject: [PATCH 018/191] examples/speed_benchmark: send 1MB in one RPC --- examples/master/repository/speed_benchmark.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/examples/master/repository/speed_benchmark.py b/examples/master/repository/speed_benchmark.py index 6c188f544..d2084d243 100644 --- a/examples/master/repository/speed_benchmark.py +++ b/examples/master/repository/speed_benchmark.py @@ -42,11 +42,8 @@ class _PayloadCoreSend1MB(EnvExperiment): @kernel def run(self): - # FIXME: make this a single list - # affected by issue #82 at the moment - data = [0 for _ in range(20000//4)] - for i in range(50): - self.devnull(data) + data = [0 for _ in range(1000000//4)] + self.devnull(data) class _PayloadCorePrimes(EnvExperiment): From 9b0ed344ed0bd4837249a2839a4c1e07e486ceb6 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Fri, 31 Jul 2015 19:29:34 +0800 Subject: [PATCH 019/191] runtime/Makefile: WA for more pesky travis/miniconda misbehavior --- soc/runtime/Makefile | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/soc/runtime/Makefile b/soc/runtime/Makefile index 36fb42b2b..bbfa9ef8a 100644 --- a/soc/runtime/Makefile +++ b/soc/runtime/Makefile @@ -1,5 +1,7 @@ include $(MSCDIR)/software/common.mak +PYTHON ?= python3 + OBJECTS := isr.o flash_storage.o clock.o rtiocrg.o elf_loader.o services.o session.o log.o test_mode.o kloader.o bridge_ctl.o mailbox.o ksupport_data.o net_server.o moninj.o main.o OBJECTS_KSUPPORT := ksupport.o exception_jmp.o exceptions.o mailbox.o bridge.o rtio.o ttl.o dds.o @@ -43,7 +45,7 @@ ksupport_data.o: ksupport.bin $(LD) -r -b binary -o $@ $< service_table.h: ksupport.elf gen_service_table.py - @echo " GEN " $@ && ./gen_service_table.py ksupport.elf > $@ + @echo " GEN " $@ && $(PYTHON) gen_service_table.py 
ksupport.elf > $@ services.c: service_table.h From 4a7a4acf074fd129abfa10fdaeb6ccf2d65f46b9 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sat, 1 Aug 2015 00:16:26 +0800 Subject: [PATCH 020/191] conda: remove unnecessary pixman package --- conda/pixman/build.sh | 4 ---- conda/pixman/meta.yaml | 16 ---------------- 2 files changed, 20 deletions(-) delete mode 100644 conda/pixman/build.sh delete mode 100644 conda/pixman/meta.yaml diff --git a/conda/pixman/build.sh b/conda/pixman/build.sh deleted file mode 100644 index 06a641d4f..000000000 --- a/conda/pixman/build.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash -./configure --prefix=$PREFIX -make -j -make install diff --git a/conda/pixman/meta.yaml b/conda/pixman/meta.yaml deleted file mode 100644 index df1ccd8f3..000000000 --- a/conda/pixman/meta.yaml +++ /dev/null @@ -1,16 +0,0 @@ -# This conda recipe comes from https://github.com/sandialabs/pixman-conda-recipe - -package: - name: pixman - version: "0.32.6" - -source: - fn: pixman-0.32.6.tar.gz - url: http://cairographics.org/releases/pixman-0.32.6.tar.gz - -build: - number: 0 - -about: - home: http://cairographics.org - license: GNU Lesser General Public License (LGPL) version 2.1 or the Mozilla Public License (MPL) version 1.1 at your option. From 9e24b56099612b2be36964e56441a77aa2d59572 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sat, 1 Aug 2015 16:48:27 +0800 Subject: [PATCH 021/191] gui: add state manager --- artiq/gui/state.py | 63 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 63 insertions(+) create mode 100644 artiq/gui/state.py diff --git a/artiq/gui/state.py b/artiq/gui/state.py new file mode 100644 index 000000000..9c5e7b8bb --- /dev/null +++ b/artiq/gui/state.py @@ -0,0 +1,63 @@ +import asyncio +from collections import OrderedDict + +from artiq.tools import TaskObject +from artiq.protocols import pyon + + +# support Qt CamelCase naming scheme for save/restore state +def _save_state(obj): + method = getattr(obj, "save_state", None) + if method is None: + method = obj.saveState + return method() + + +def _restore_state(obj, state): + method = getattr(obj, "restore_state", None) + if method is None: + method = obj.restoreState + method(state) + + +class StateManager(TaskObject): + def __init__(self, filename, autosave_period=30): + self.filename = filename + self.autosave_period = autosave_period + self.stateful_objects = OrderedDict() + + def register(self, obj, name=None): + if name is None: + name = obj.__class__.__name__ + if name in self.stateful_objects: + raise RuntimeError("Name '{}' already exists in state" + .format(name)) + self.stateful_objects[name] = obj + + def load(self): + try: + data = pyon.load_file(self.filename) + except FileNotFoundError: + return + # The state of one object may depend on the state of another, + # e.g. the display state may create docks that are referenced in + # the area state. + # To help address this problem, state is restored in the opposite + # order as the stateful objects are registered. 
+ for name, obj in reversed(list(self.stateful_objects.items())): + state = data.get(name, None) + if state is not None: + _restore_state(obj, state) + + def save(self): + data = {k: _save_state(v) for k, v in self.stateful_objects.items()} + pyon.store_file(self.filename, data) + + @asyncio.coroutine + def _do(self): + try: + while True: + yield from asyncio.sleep(self.autosave_period) + self.save() + finally: + self.save() From 00cae1c23a0c28784ae3ebafb59910ddc91e088b Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sat, 1 Aug 2015 16:48:44 +0800 Subject: [PATCH 022/191] gui: save dock area state --- artiq/frontend/artiq_gui.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/artiq/frontend/artiq_gui.py b/artiq/frontend/artiq_gui.py index fb6700377..35f444fa9 100755 --- a/artiq/frontend/artiq_gui.py +++ b/artiq/frontend/artiq_gui.py @@ -10,8 +10,8 @@ import os from quamash import QEventLoop, QtGui from pyqtgraph import dockarea -from artiq.protocols.file_db import FlatFileDB from artiq.protocols.pc_rpc import AsyncioClient +from artiq.gui.state import StateManager from artiq.gui.explorer import ExplorerDock from artiq.gui.moninj import MonInj from artiq.gui.results import ResultsDock @@ -53,16 +53,16 @@ class _MainWindow(QtGui.QMainWindow): def closeEvent(self, *args): self.exit_request.set() + def main(): - args = get_argparser().parse_args() - - db = FlatFileDB(args.db_file, default_data=dict()) - app = QtGui.QApplication([]) loop = QEventLoop(app) asyncio.set_event_loop(loop) atexit.register(lambda: loop.close()) + args = get_argparser().parse_args() + smgr = StateManager(args.db_file) + schedule_ctl = AsyncioClient() loop.run_until_complete(schedule_ctl.connect_rpc( args.server, args.port_control, "master_schedule")) @@ -70,6 +70,7 @@ def main(): win = _MainWindow(app) area = dockarea.DockArea() + smgr.register(area) win.setCentralWidget(area) status_bar = QtGui.QStatusBar() status_bar.showMessage("Connected to {}".format(args.server)) @@ -125,6 +126,9 @@ def main(): area.addDock(d_log, "above", d_console) area.addDock(d_schedule, "above", d_log) + smgr.load() + smgr.start() + atexit.register(lambda: loop.run_until_complete(smgr.stop())) win.show() loop.run_until_complete(win.exit_request.wait()) From a64766a10d58c37b9850e47562f9dd7c1afac4fc Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sat, 1 Aug 2015 16:49:05 +0800 Subject: [PATCH 023/191] protocols/FlatFileDB: remove unneeded default_data --- artiq/protocols/file_db.py | 11 ++--------- 1 file changed, 2 insertions(+), 9 deletions(-) diff --git a/artiq/protocols/file_db.py b/artiq/protocols/file_db.py index b7499587e..744eff687 100644 --- a/artiq/protocols/file_db.py +++ b/artiq/protocols/file_db.py @@ -5,16 +5,9 @@ from artiq.protocols.sync_struct import Notifier class FlatFileDB: - def __init__(self, filename, default_data=None): + def __init__(self, filename): self.filename = filename - try: - data = pyon.load_file(self.filename) - except FileNotFoundError: - if default_data is None: - raise - else: - data = default_data - self.data = Notifier(data) + self.data = Notifier(pyon.load_file(self.filename)) self.hooks = [] def save(self): From 8ad88438c71fd34a190868a87b44d4fb5f5555ca Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sat, 1 Aug 2015 19:37:16 +0800 Subject: [PATCH 024/191] gui: save display state --- artiq/frontend/artiq_gui.py | 1 + artiq/gui/results.py | 27 +++++++++++++++++++++++++++ 2 files changed, 28 insertions(+) diff --git 
a/artiq/frontend/artiq_gui.py b/artiq/frontend/artiq_gui.py index 35f444fa9..74ced8e62 100755 --- a/artiq/frontend/artiq_gui.py +++ b/artiq/frontend/artiq_gui.py @@ -82,6 +82,7 @@ def main(): atexit.register(lambda: loop.run_until_complete(d_explorer.sub_close())) d_results = ResultsDock(win, area) + smgr.register(d_results) loop.run_until_complete(d_results.sub_connect( args.server, args.port_notify)) atexit.register(lambda: loop.run_until_complete(d_results.sub_close())) diff --git a/artiq/gui/results.py b/artiq/gui/results.py index c7f47214f..9c4c321c8 100644 --- a/artiq/gui/results.py +++ b/artiq/gui/results.py @@ -1,6 +1,7 @@ import asyncio from collections import OrderedDict from functools import partial +import logging from quamash import QtGui, QtCore from pyqtgraph import dockarea @@ -11,6 +12,9 @@ from artiq.gui.tools import DictSyncModel, short_format from artiq.gui.displays import * +logger = logging.getLogger(__name__) + + class ResultsModel(DictSyncModel): def __init__(self, parent, init): DictSyncModel.__init__(self, ["Result", "Value"], @@ -28,6 +32,12 @@ class ResultsModel(DictSyncModel): raise ValueError +def _get_display_type_name(display_cls): + for name, (_, cls) in display_types.items(): + if cls is display_cls: + return name + + class ResultsDock(dockarea.Dock): def __init__(self, dialog_parent, dock_area): dockarea.Dock.__init__(self, "Results", size=(1500, 500)) @@ -110,3 +120,20 @@ class ResultsDock(dockarea.Dock): dsp.sigClosed.connect(on_close) self.dock_area.addDock(dsp) self.dock_area.floatDock(dsp) + + def save_state(self): + r = dict() + for name, display in self.displays.items(): + r[name] = { + "ty": _get_display_type_name(type(display)), + "settings": display.settings + } + return r + + def restore_state(self, state): + for name, desc in state.items(): + try: + self.create_display(desc["ty"], None, name, desc["settings"]) + except: + logger.warning("Failed to create display '%s'", name, + exc_info=True) From b2f720da67bf3d9f94cea0e9ed7e7e71fda1d995 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sat, 1 Aug 2015 19:52:13 +0800 Subject: [PATCH 025/191] gui: better state error handling Remains limited by issue pyqtgraph/pyqtgraph#204 --- artiq/gui/state.py | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/artiq/gui/state.py b/artiq/gui/state.py index 9c5e7b8bb..9088da4e6 100644 --- a/artiq/gui/state.py +++ b/artiq/gui/state.py @@ -1,10 +1,14 @@ import asyncio from collections import OrderedDict +import logging from artiq.tools import TaskObject from artiq.protocols import pyon +logger = logging.getLogger(__name__) + + # support Qt CamelCase naming scheme for save/restore state def _save_state(obj): method = getattr(obj, "save_state", None) @@ -38,6 +42,8 @@ class StateManager(TaskObject): try: data = pyon.load_file(self.filename) except FileNotFoundError: + logger.info("State database '%s' not found, using defaults", + self.filename) return # The state of one object may depend on the state of another, # e.g. 
the display state may create docks that are referenced in @@ -47,10 +53,20 @@ class StateManager(TaskObject): for name, obj in reversed(list(self.stateful_objects.items())): state = data.get(name, None) if state is not None: - _restore_state(obj, state) + try: + _restore_state(obj, state) + except: + logger.warning("Failed to restore state for object '%s'", + name, exc_info=True) def save(self): - data = {k: _save_state(v) for k, v in self.stateful_objects.items()} + data = dict() + for k, v in self.stateful_objects.items(): + try: + data[k] = _save_state(v) + except: + logger.warning("Failed to save state for object '%s'", k, + exc_info=True) pyon.store_file(self.filename, data) @asyncio.coroutine From ad7cbc439476db2d96017f912067991cf1514b79 Mon Sep 17 00:00:00 2001 From: whitequark Date: Sun, 2 Aug 2015 16:35:49 +0300 Subject: [PATCH 026/191] =?UTF-8?q?Rename=20artiq=5Fcoreconfig=20=E2=86=92?= =?UTF-8?q?=20artiq=5Fcoretool;=20add=20log=20subcommand.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- ...{artiq_coreconfig.py => artiq_coretool.py} | 46 +++++++++++++------ doc/manual/core_device_flash_storage.rst | 2 +- doc/manual/installing.rst | 4 +- doc/manual/utilities.rst | 42 +++++++++-------- setup.py | 2 +- 5 files changed, 57 insertions(+), 39 deletions(-) rename artiq/frontend/{artiq_coreconfig.py => artiq_coretool.py} (75%) diff --git a/artiq/frontend/artiq_coreconfig.py b/artiq/frontend/artiq_coretool.py similarity index 75% rename from artiq/frontend/artiq_coreconfig.py rename to artiq/frontend/artiq_coretool.py index 301657117..41649f03e 100755 --- a/artiq/frontend/artiq_coreconfig.py +++ b/artiq/frontend/artiq_coretool.py @@ -11,15 +11,26 @@ def to_bytes(string): def get_argparser(): - parser = argparse.ArgumentParser(description="ARTIQ core device config " - "remote access") + parser = argparse.ArgumentParser(description="ARTIQ core device " + "remote access tool") + parser.add_argument("--ddb", default="ddb.pyon", + help="device database file") + subparsers = parser.add_subparsers(dest="action") subparsers.required = True - p_read = subparsers.add_parser("read", + + # Log Read command + subparsers.add_parser("log", + help="read from the core device log ring buffer") + + # Configuration Read command + p_read = subparsers.add_parser("cfg-read", help="read key from core device config") p_read.add_argument("key", type=to_bytes, help="key to be read from core device config") - p_write = subparsers.add_parser("write", + + # Configuration Write command + p_write = subparsers.add_parser("cfg-write", help="write key-value records to core " "device config") p_write.add_argument("-s", "--string", nargs=2, action="append", @@ -31,14 +42,17 @@ def get_argparser(): metavar=("KEY", "FILENAME"), help="key and file whose content to be written to " "core device config") - subparsers.add_parser("erase", help="erase core device config") - p_delete = subparsers.add_parser("delete", + + # Configuration Delete command + p_delete = subparsers.add_parser("cfg-delete", help="delete key from core device config") p_delete.add_argument("key", nargs=argparse.REMAINDER, default=[], type=to_bytes, help="key to be deleted from core device config") - parser.add_argument("--ddb", default="ddb.pyon", - help="device database file") + + # Configuration Erase command + subparsers.add_parser("cfg-erase", help="erase core device config") + return parser @@ -48,23 +62,25 @@ def main(): try: comm = dmgr.get("comm") - if args.action == "read": + if args.action == "log": + 
print(comm.get_log()) + elif args.action == "cfg-read": value = comm.flash_storage_read(args.key) if not value: print("Key {} does not exist".format(args.key)) else: print(value) - elif args.action == "erase": - comm.flash_storage_erase() - elif args.action == "delete": - for key in args.key: - comm.flash_storage_remove(key) - elif args.action == "write": + elif args.action == "cfg-write": for key, value in args.string: comm.flash_storage_write(key, value) for key, filename in args.file: with open(filename, "rb") as fi: comm.flash_storage_write(key, fi.read()) + elif args.action == "cfg-delete": + for key in args.key: + comm.flash_storage_remove(key) + elif args.action == "cfg-erase": + comm.flash_storage_erase() finally: dmgr.close_devices() diff --git a/doc/manual/core_device_flash_storage.rst b/doc/manual/core_device_flash_storage.rst index fb2bb8d9a..cc5fe1b2d 100644 --- a/doc/manual/core_device_flash_storage.rst +++ b/doc/manual/core_device_flash_storage.rst @@ -11,4 +11,4 @@ This storage area is used to store the core device MAC address, IP address and e The flash storage area is one sector (64 kB) large and is organized as a list of key-value records. -This flash storage space can be accessed by using the artiq_coreconfig.py :ref:`core-device-configuration-tool`. +This flash storage space can be accessed by using the artiq_coretool.py :ref:`core-device-access-tool`. diff --git a/doc/manual/installing.rst b/doc/manual/installing.rst index 5ade4cb7e..3f04b043a 100644 --- a/doc/manual/installing.rst +++ b/doc/manual/installing.rst @@ -289,9 +289,9 @@ To flash the ``idle`` kernel: * Write it into the core device configuration flash storage: :: - $ artiq_coreconfig write -f idle_kernel idle.elf + $ artiq_coretool cfg-write -f idle_kernel idle.elf -.. note:: You can find more information about how to use the ``artiq_coreconfig`` tool on the :ref:`Utilities ` page. +.. note:: You can find more information about how to use the ``artiq_coretool`` utility on the :ref:`Utilities ` page. Installing the host-side software ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/doc/manual/utilities.rst b/doc/manual/utilities.rst index a507291ae..23df5b581 100644 --- a/doc/manual/utilities.rst +++ b/doc/manual/utilities.rst @@ -93,60 +93,62 @@ This tool compiles key/value pairs into a binary image suitable for flashing int :ref: artiq.frontend.artiq_mkfs.get_argparser :prog: artiq_mkfs -.. _core-device-configuration-tool: +.. _core-device-access-tool: -Core device configuration tool ------------------------------- +Core device access tool +----------------------- -The artiq_coreconfig tool allows to read, write and remove key-value records from the :ref:`core-device-flash-storage`. +The artiq_coretool utility allows to perform maintenance on the core device: -It also allows to erase the entire flash storage area. + * read core device logs; + * as well as read, write and remove key-value records from the :ref:`core-device-flash-storage`; + * erase the entire flash storage area. To use this tool, you need to specify a ``ddb.pyon`` DDB file which contains a ``comm`` device (an example is provided in ``artiq/examples/master/ddb.pyon``). This tells the tool how to connect to the core device (via serial or via TCP) and with which parameters (baudrate, serial device, IP address, TCP port). -When not specified, the artiq_coreconfig tool will assume that there is a file named ``ddb.pyon`` in the current directory. 
+When not specified, the artiq_coretool utility will assume that there is a file named ``ddb.pyon`` in the current directory. To read the record whose key is ``mac``:: - $ artiq_coreconfig read mac + $ artiq_coretool cfg-read mac To write the value ``test_value`` in the key ``my_key``:: - $ artiq_coreconfig write -s my_key test_value - $ artiq_coreconfig read my_key + $ artiq_coretool cfg-write -s my_key test_value + $ artiq_coretool cfg-read my_key b'test_value' You can also write entire files in a record using the ``-f`` parameter. This is useful for instance to write the ``idle`` kernel in the flash storage:: - $ artiq_coreconfig write -f idle_kernel idle.elf - $ artiq_coreconfig read idle_kernel | head -c9 + $ artiq_coretool cfg-write -f idle_kernel idle.elf + $ artiq_coretool cfg-read idle_kernel | head -c9 b'\x7fELF You can write several records at once:: - $ artiq_coreconfig write -s key1 value1 -f key2 filename -s key3 value3 + $ artiq_coretool cfg-write -s key1 value1 -f key2 filename -s key3 value3 To remove the previously written key ``my_key``:: - $ artiq_coreconfig delete my_key + $ artiq_coretool cfg-delete my_key You can remove several keys at once:: - $ artiq_coreconfig delete key1 key2 + $ artiq_coretool cfg-delete key1 key2 To erase the entire flash storage area:: - $ artiq_coreconfig erase + $ artiq_coretool cfg-erase You don't need to remove a record in order to change its value, just overwrite it:: - $ artiq_coreconfig write -s my_key some_value - $ artiq_coreconfig write -s my_key some_other_value - $ artiq_coreconfig read my_key + $ artiq_coretool cfg-write -s my_key some_value + $ artiq_coretool cfg-write -s my_key some_other_value + $ artiq_coretool cfg-read my_key b'some_other_value' .. argparse:: - :ref: artiq.frontend.artiq_coreconfig.get_argparser - :prog: artiq_coreconfig + :ref: artiq.frontend.artiq_coretool.get_argparser + :prog: artiq_coretool diff --git a/setup.py b/setup.py index e3ca507f6..8c97ce619 100755 --- a/setup.py +++ b/setup.py @@ -26,7 +26,7 @@ requirements = [ scripts = [ "artiq_client=artiq.frontend.artiq_client:main", "artiq_compile=artiq.frontend.artiq_compile:main", - "artiq_coreconfig=artiq.frontend.artiq_coreconfig:main", + "artiq_coretool=artiq.frontend.artiq_coretool:main", "artiq_ctlmgr=artiq.frontend.artiq_ctlmgr:main", "artiq_gui=artiq.frontend.artiq_gui:main", "artiq_master=artiq.frontend.artiq_master:main", From 433cb948bc367457a8e0134052424d92afda1773 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Tue, 4 Aug 2015 17:00:48 +0800 Subject: [PATCH 027/191] doc: artiq git clone needs --recursive to fetch lwIP --- doc/manual/installing.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/manual/installing.rst b/doc/manual/installing.rst index 3f04b043a..e36c3a750 100644 --- a/doc/manual/installing.rst +++ b/doc/manual/installing.rst @@ -190,7 +190,7 @@ These steps are required to generate bitstream (``.bit``) files, build the MiSoC * Download and install ARTIQ: :: $ cd ~/artiq-dev - $ git clone https://github.com/m-labs/artiq + $ git clone --recursive https://github.com/m-labs/artiq $ python3 setup.py develop --user * Build and flash the bitstream and BIOS by running `from the MiSoC top-level directory`: @@ -310,7 +310,7 @@ Installing the host-side software * Install ARTIQ: :: $ cd ~/artiq-dev - $ git clone https://github.com/m-labs/artiq # if not already done + $ git clone --recursive https://github.com/m-labs/artiq # if not already done $ cd artiq $ python3 setup.py develop --user From 
18f38e6242b379275aefb1c601631ab1a42fcdd7 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Tue, 4 Aug 2015 20:20:56 +0800 Subject: [PATCH 028/191] doc/manual: make sure correct Clang is used --- doc/manual/installing.rst | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/doc/manual/installing.rst b/doc/manual/installing.rst index e36c3a750..9bc358753 100644 --- a/doc/manual/installing.rst +++ b/doc/manual/installing.rst @@ -197,7 +197,9 @@ These steps are required to generate bitstream (``.bit``) files, build the MiSoC :: $ cd ~/artiq-dev/misoc - $ export PATH=$PATH:/usr/local/llvm-or1k/bin + $ export PATH=/usr/local/llvm-or1k/bin:$PATH + + .. note:: Make sure that ``/usr/local/llvm-or1k/bin`` is first in your ``PATH``, so that the ``clang`` command you just built is found instead of the system one, if any. * For Pipistrello:: From d0402243a02550538ddcb9d5b85e0aba0a496cd7 Mon Sep 17 00:00:00 2001 From: whitequark Date: Tue, 4 Aug 2015 17:58:35 +0300 Subject: [PATCH 029/191] Update installation instructions. --- doc/manual/installing.rst | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/doc/manual/installing.rst b/doc/manual/installing.rst index 9bc358753..985acd80f 100644 --- a/doc/manual/installing.rst +++ b/doc/manual/installing.rst @@ -302,11 +302,8 @@ Installing the host-side software $ cd ~/artiq-dev $ git clone https://github.com/m-labs/llvmlite - $ git checkout backport-3.5 + $ git checkout artiq $ cd llvmlite - $ patch -p1 < ~/artiq-dev/artiq/misc/llvmlite-add-all-targets.patch - $ patch -p1 < ~/artiq-dev/artiq/misc/llvmlite-rename.patch - $ patch -p1 < ~/artiq-dev/artiq/misc/llvmlite-build-as-debug-on-windows.patch $ LLVM_CONFIG=/usr/local/llvm-or1k/bin/llvm-config python3 setup.py install --user * Install ARTIQ: :: From 4a8e397a77113a2d9e5dffe268782274af9c3a84 Mon Sep 17 00:00:00 2001 From: whitequark Date: Wed, 29 Jul 2015 13:54:00 +0300 Subject: [PATCH 030/191] Fold llvmlite patches into m-labs/llvmlite repository. 
--- artiq/coredevice/runtime.py | 4 +- artiq/py2llvm/ast_body.py | 2 +- artiq/py2llvm/base_types.py | 2 +- artiq/py2llvm/fractions.py | 2 +- artiq/py2llvm/lists.py | 2 +- artiq/py2llvm/module.py | 4 +- artiq/py2llvm/tools.py | 2 +- artiq/py2llvm/values.py | 2 +- artiq/test/py2llvm.py | 2 +- conda/llvmlite-or1k/bld.bat | 5 -- conda/llvmlite-or1k/build.sh | 3 - conda/llvmlite-or1k/meta.yaml | 4 +- misc/llvmlite-add-all-targets.patch | 38 ----------- misc/llvmlite-build-as-debug-on-windows.patch | 13 ---- misc/llvmlite-rename.patch | 65 ------------------- setup.py | 2 +- 16 files changed, 14 insertions(+), 138 deletions(-) delete mode 100644 misc/llvmlite-add-all-targets.patch delete mode 100644 misc/llvmlite-build-as-debug-on-windows.patch delete mode 100644 misc/llvmlite-rename.patch diff --git a/artiq/coredevice/runtime.py b/artiq/coredevice/runtime.py index 84a6bb386..ae818b1a6 100644 --- a/artiq/coredevice/runtime.py +++ b/artiq/coredevice/runtime.py @@ -1,7 +1,7 @@ import os -import llvmlite_or1k.ir as ll -import llvmlite_or1k.binding as llvm +import llvmlite_artiq.ir as ll +import llvmlite_artiq.binding as llvm from artiq.py2llvm import base_types, fractions, lists from artiq.language import units diff --git a/artiq/py2llvm/ast_body.py b/artiq/py2llvm/ast_body.py index 42de0fc9c..17b08c861 100644 --- a/artiq/py2llvm/ast_body.py +++ b/artiq/py2llvm/ast_body.py @@ -1,6 +1,6 @@ import ast -import llvmlite_or1k.ir as ll +import llvmlite_artiq.ir as ll from artiq.py2llvm import values, base_types, fractions, lists, iterators from artiq.py2llvm.tools import is_terminated diff --git a/artiq/py2llvm/base_types.py b/artiq/py2llvm/base_types.py index 3ef472984..a5690c396 100644 --- a/artiq/py2llvm/base_types.py +++ b/artiq/py2llvm/base_types.py @@ -1,4 +1,4 @@ -import llvmlite_or1k.ir as ll +import llvmlite_artiq.ir as ll from artiq.py2llvm.values import VGeneric diff --git a/artiq/py2llvm/fractions.py b/artiq/py2llvm/fractions.py index aae7575cc..d00ff74de 100644 --- a/artiq/py2llvm/fractions.py +++ b/artiq/py2llvm/fractions.py @@ -1,7 +1,7 @@ import inspect import ast -import llvmlite_or1k.ir as ll +import llvmlite_artiq.ir as ll from artiq.py2llvm.values import VGeneric, operators from artiq.py2llvm.base_types import VBool, VInt, VFloat diff --git a/artiq/py2llvm/lists.py b/artiq/py2llvm/lists.py index e17ab5348..d486e7ddd 100644 --- a/artiq/py2llvm/lists.py +++ b/artiq/py2llvm/lists.py @@ -1,4 +1,4 @@ -import llvmlite_or1k.ir as ll +import llvmlite_artiq.ir as ll from artiq.py2llvm.values import VGeneric from artiq.py2llvm.base_types import VInt, VNone diff --git a/artiq/py2llvm/module.py b/artiq/py2llvm/module.py index f4df806e6..b842833e9 100644 --- a/artiq/py2llvm/module.py +++ b/artiq/py2llvm/module.py @@ -1,5 +1,5 @@ -import llvmlite_or1k.ir as ll -import llvmlite_or1k.binding as llvm +import llvmlite_artiq.ir as ll +import llvmlite_artiq.binding as llvm from artiq.py2llvm import infer_types, ast_body, base_types, fractions, tools diff --git a/artiq/py2llvm/tools.py b/artiq/py2llvm/tools.py index ba9e76949..361b82a6f 100644 --- a/artiq/py2llvm/tools.py +++ b/artiq/py2llvm/tools.py @@ -1,4 +1,4 @@ -import llvmlite_or1k.ir as ll +import llvmlite_artiq.ir as ll def is_terminated(basic_block): return (basic_block.instructions diff --git a/artiq/py2llvm/values.py b/artiq/py2llvm/values.py index 6f0b90e2c..254d17541 100644 --- a/artiq/py2llvm/values.py +++ b/artiq/py2llvm/values.py @@ -1,7 +1,7 @@ from types import SimpleNamespace from copy import copy -import llvmlite_or1k.ir as ll +import 
llvmlite_artiq.ir as ll class VGeneric: diff --git a/artiq/test/py2llvm.py b/artiq/test/py2llvm.py index 6eb607c7d..6026d3797 100644 --- a/artiq/test/py2llvm.py +++ b/artiq/test/py2llvm.py @@ -5,7 +5,7 @@ from fractions import Fraction from ctypes import CFUNCTYPE, c_int, c_int32, c_int64, c_double import struct -import llvmlite_or1k.binding as llvm +import llvmlite_artiq.binding as llvm from artiq.language.core import int64 from artiq.py2llvm.infer_types import infer_function_types diff --git a/conda/llvmlite-or1k/bld.bat b/conda/llvmlite-or1k/bld.bat index 17e63ad30..bbb38d3c9 100644 --- a/conda/llvmlite-or1k/bld.bat +++ b/conda/llvmlite-or1k/bld.bat @@ -4,10 +4,5 @@ set CMAKE_PREFIX_PATH=%LIBRARY_PREFIX% @rem Ensure there are no build leftovers (CMake can complain) if exist ffi\build rmdir /S /Q ffi\build -@rem Apply patches -patch -p1 < %RECIPE_DIR%/../../misc/llvmlite-add-all-targets.patch -patch -p1 < %RECIPE_DIR%/../../misc/llvmlite-rename.patch -patch -p1 < %RECIPE_DIR%/../../misc/llvmlite-build-as-debug-on-windows.patch - %PYTHON% -S setup.py install if errorlevel 1 exit 1 diff --git a/conda/llvmlite-or1k/build.sh b/conda/llvmlite-or1k/build.sh index 327c15518..d3b3bbe2c 100755 --- a/conda/llvmlite-or1k/build.sh +++ b/conda/llvmlite-or1k/build.sh @@ -1,6 +1,3 @@ #!/bin/bash -patch -p1 < ${RECIPE_DIR}/../../misc/llvmlite-add-all-targets.patch -patch -p1 < ${RECIPE_DIR}/../../misc/llvmlite-rename.patch -patch -p1 < ${RECIPE_DIR}/../../misc/llvmlite-build-as-debug-on-windows.patch PATH=/usr/local/llvm-or1k/bin:$PATH $PYTHON setup.py install diff --git a/conda/llvmlite-or1k/meta.yaml b/conda/llvmlite-or1k/meta.yaml index db3c24bcd..669c37881 100644 --- a/conda/llvmlite-or1k/meta.yaml +++ b/conda/llvmlite-or1k/meta.yaml @@ -19,8 +19,8 @@ build: test: imports: - - llvmlite_or1k - - llvmlite_or1k.llvmpy + - llvmlite_artiq + - llvmlite_artiq.llvmpy about: home: https://pypi.python.org/pypi/llvmlite/ diff --git a/misc/llvmlite-add-all-targets.patch b/misc/llvmlite-add-all-targets.patch deleted file mode 100644 index 6b52fad9f..000000000 --- a/misc/llvmlite-add-all-targets.patch +++ /dev/null @@ -1,38 +0,0 @@ -diff --git a/ffi/initfini.cpp b/ffi/initfini.cpp -index 42c8965..067be62 100644 ---- a/ffi/initfini.cpp -+++ b/ffi/initfini.cpp -@@ -37,9 +37,10 @@ LLVMPY_Shutdown(){ - // NOTE: it is important that we don't export functions which we don't use, - // especially those which may pull in large amounts of additional code or data. - --// INIT(AllTargetInfos) --// INIT(AllTargets) --// INIT(AllTargetMCs) -+INIT(AllTargetInfos) -+INIT(AllTargets) -+INIT(AllTargetMCs) -+INIT(AllAsmPrinters) - INIT(NativeTarget) - INIT(NativeAsmParser) - INIT(NativeAsmPrinter) -diff --git a/llvmlite/binding/initfini.py b/llvmlite/binding/initfini.py -index bfaa5b2..7d0df11 100644 ---- a/llvmlite/binding/initfini.py -+++ b/llvmlite/binding/initfini.py -@@ -8,6 +8,15 @@ def initialize(): - ffi.lib.LLVMPY_InitializeCore() - - -+def initialize_all_targets(): -+ ffi.lib.LLVMPY_InitializeAllTargetInfos() -+ ffi.lib.LLVMPY_InitializeAllTargets() -+ ffi.lib.LLVMPY_InitializeAllTargetMCs() -+ -+def initialize_all_asmprinters(): -+ ffi.lib.LLVMPY_InitializeAllAsmPrinters() -+ -+ - def initialize_native_target(): - """ - Initialize the native (host) target. 
Necessary before doing any diff --git a/misc/llvmlite-build-as-debug-on-windows.patch b/misc/llvmlite-build-as-debug-on-windows.patch deleted file mode 100644 index e385fb4a2..000000000 --- a/misc/llvmlite-build-as-debug-on-windows.patch +++ /dev/null @@ -1,13 +0,0 @@ -diff --git a/ffi/build.py b/ffi/build.py -index 3889ba5..58f93ec 100755 ---- a/ffi/build.py -+++ b/ffi/build.py -@@ -58,7 +58,7 @@ def find_win32_generator(): - - def main_win32(): - generator = find_win32_generator() -- config = 'Release' -+ config = 'Debug' - if not os.path.exists(build_dir): - os.mkdir(build_dir) - try_cmake(here_dir, build_dir, generator) diff --git a/misc/llvmlite-rename.patch b/misc/llvmlite-rename.patch deleted file mode 100644 index faea85104..000000000 --- a/misc/llvmlite-rename.patch +++ /dev/null @@ -1,65 +0,0 @@ -diff --git a/setup.py b/setup.py -index 6d28265..f4edd29 100644 ---- a/setup.py -+++ b/setup.py -@@ -15,10 +15,10 @@ from llvmlite.utils import get_library_files - import versioneer - - versioneer.VCS = 'git' --versioneer.versionfile_source = 'llvmlite/_version.py' --versioneer.versionfile_build = 'llvmlite/_version.py' -+versioneer.versionfile_source = 'llvmlite_or1k/_version.py' -+versioneer.versionfile_build = 'llvmlite_or1k/_version.py' - versioneer.tag_prefix = 'v' # tags are like v1.2.0 --versioneer.parentdir_prefix = 'llvmlite-' # dirname like 'myproject-1.2.0' -+versioneer.parentdir_prefix = 'llvmlite_or1k-' # dirname like 'myproject-1.2.0' - - - here_dir = os.path.dirname(__file__) -@@ -54,7 +54,7 @@ class LlvmliteBuildExt(build_ext): - # HACK: this makes sure the library file (which is large) is only - # included in binary builds, not source builds. - self.distribution.package_data = { -- "llvmlite.binding": get_library_files(), -+ "llvmlite_or1k.binding": get_library_files(), - } - - -@@ -63,7 +63,7 @@ class LlvmliteInstall(install): - # This seems to only be necessary on OSX. 
- def run(self): - self.distribution.package_data = { -- "llvmlite.binding": get_library_files(), -+ "llvmlite_or1k.binding": get_library_files(), - } - install.run(self) - -@@ -74,14 +74,14 @@ cmdclass.update({'build': LlvmliteBuild, - }) - - --packages = ['llvmlite', -- 'llvmlite.binding', -- 'llvmlite.ir', -- 'llvmlite.llvmpy', -- 'llvmlite.tests', -+packages = ['llvmlite_or1k', -+ 'llvmlite_or1k.binding', -+ 'llvmlite_or1k.ir', -+ 'llvmlite_or1k.llvmpy', -+ 'llvmlite_or1k.tests', - ] - --setup(name='llvmlite', -+setup(name='llvmlite_or1k', - description="lightweight wrapper around basic LLVM functionality", - version=versioneer.get_version(), - classifiers=[ -@@ -96,6 +96,7 @@ setup(name='llvmlite', - "Topic :: Software Development :: Code Generators", - "Topic :: Software Development :: Compilers", - ], -+ package_dir={"llvmlite_or1k" : "llvmlite"}, - # Include the separately-compiled shared library - author="Continuum Analytics, Inc.", - author_email="numba-users@continuum.io", diff --git a/setup.py b/setup.py index 8c97ce619..1936af587 100755 --- a/setup.py +++ b/setup.py @@ -20,7 +20,7 @@ class PushDocCommand(Command): requirements = [ "sphinx", "sphinx-argparse", "pyserial", "numpy", "scipy", "python-dateutil", "prettytable", "h5py", "pydaqmx", "pyelftools", - "quamash", "pyqtgraph" + "quamash", "pyqtgraph", "llvmlite_artiq" ] scripts = [ From 8f2dfcd08e6c33644e841a7e165a8a33503c6329 Mon Sep 17 00:00:00 2001 From: whitequark Date: Wed, 5 Aug 2015 04:17:59 +0300 Subject: [PATCH 031/191] =?UTF-8?q?Rename=20conda=20package=20llvmlite-{or?= =?UTF-8?q?1k=20=E2=86=92=20artiq}.?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- conda/artiq/meta.yaml | 2 +- conda/{llvmlite-or1k => llvmlite-artiq}/bld.bat | 0 conda/{llvmlite-or1k => llvmlite-artiq}/build.sh | 0 conda/{llvmlite-or1k => llvmlite-artiq}/meta.yaml | 8 ++++---- 4 files changed, 5 insertions(+), 5 deletions(-) rename conda/{llvmlite-or1k => llvmlite-artiq}/bld.bat (100%) rename conda/{llvmlite-or1k => llvmlite-artiq}/build.sh (100%) rename conda/{llvmlite-or1k => llvmlite-artiq}/meta.yaml (67%) diff --git a/conda/artiq/meta.yaml b/conda/artiq/meta.yaml index 97c9a5e51..67991b4b9 100644 --- a/conda/artiq/meta.yaml +++ b/conda/artiq/meta.yaml @@ -34,7 +34,7 @@ requirements: - pyelftools run: - python >=3.4.3 - - llvmlite-or1k + - llvmlite-artiq - scipy - numpy - prettytable diff --git a/conda/llvmlite-or1k/bld.bat b/conda/llvmlite-artiq/bld.bat similarity index 100% rename from conda/llvmlite-or1k/bld.bat rename to conda/llvmlite-artiq/bld.bat diff --git a/conda/llvmlite-or1k/build.sh b/conda/llvmlite-artiq/build.sh similarity index 100% rename from conda/llvmlite-or1k/build.sh rename to conda/llvmlite-artiq/build.sh diff --git a/conda/llvmlite-or1k/meta.yaml b/conda/llvmlite-artiq/meta.yaml similarity index 67% rename from conda/llvmlite-or1k/meta.yaml rename to conda/llvmlite-artiq/meta.yaml index 669c37881..d59db9375 100644 --- a/conda/llvmlite-or1k/meta.yaml +++ b/conda/llvmlite-artiq/meta.yaml @@ -1,10 +1,10 @@ package: - name: llvmlite-or1k - version: "0.2.1" + name: llvmlite-artiq + version: "0.5.1" source: - git_url: https://github.com/numba/llvmlite - git_tag: 11a8303d02e3d6dd2d1e0e9065701795cd8a979f + git_url: https://github.com/m-labs/llvmlite + git_tag: artiq requirements: build: From c8db83b11fa9415ed220f5d546e04669bc1ace33 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Wed, 5 Aug 2015 11:41:43 +0800 Subject: [PATCH 032/191] gui: add verbosity args for 
debugging --- artiq/frontend/artiq_gui.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/artiq/frontend/artiq_gui.py b/artiq/frontend/artiq_gui.py index 74ced8e62..d0c429b70 100755 --- a/artiq/frontend/artiq_gui.py +++ b/artiq/frontend/artiq_gui.py @@ -10,6 +10,7 @@ import os from quamash import QEventLoop, QtGui from pyqtgraph import dockarea +from artiq.tools import verbosity_args, init_logger from artiq.protocols.pc_rpc import AsyncioClient from artiq.gui.state import StateManager from artiq.gui.explorer import ExplorerDock @@ -39,6 +40,7 @@ def get_argparser(): parser.add_argument( "--db-file", default="artiq_gui.pyon", help="database file for local GUI settings") + verbosity_args(parser) return parser @@ -55,12 +57,14 @@ class _MainWindow(QtGui.QMainWindow): def main(): + args = get_argparser().parse_args() + init_logger(args) + app = QtGui.QApplication([]) loop = QEventLoop(app) asyncio.set_event_loop(loop) atexit.register(lambda: loop.close()) - args = get_argparser().parse_args() smgr = StateManager(args.db_file) schedule_ctl = AsyncioClient() From cddb5b9ae4f814a50ef8474a41949e0d96279d1e Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Wed, 5 Aug 2015 13:35:28 +0800 Subject: [PATCH 033/191] gui: save explorer state --- artiq/frontend/artiq_gui.py | 1 + artiq/gui/explorer.py | 81 ++++++++++++++++++++++++++++--------- artiq/gui/scan.py | 32 ++++++++------- 3 files changed, 81 insertions(+), 33 deletions(-) diff --git a/artiq/frontend/artiq_gui.py b/artiq/frontend/artiq_gui.py index d0c429b70..7607924c7 100755 --- a/artiq/frontend/artiq_gui.py +++ b/artiq/frontend/artiq_gui.py @@ -81,6 +81,7 @@ def main(): win.setStatusBar(status_bar) d_explorer = ExplorerDock(win, status_bar, schedule_ctl) + smgr.register(d_explorer) loop.run_until_complete(d_explorer.sub_connect( args.server, args.port_notify)) atexit.register(lambda: loop.run_until_complete(d_explorer.sub_close())) diff --git a/artiq/gui/explorer.py b/artiq/gui/explorer.py index 5954b87d5..4a4bcfe99 100644 --- a/artiq/gui/explorer.py +++ b/artiq/gui/explorer.py @@ -28,21 +28,27 @@ class _FreeValueEntry(QtGui.QLineEdit): def __init__(self, procdesc): QtGui.QLineEdit.__init__(self) if "default" in procdesc: - self.insert(pyon.encode(procdesc["default"])) + self.set_argument_value(procdesc["default"]) def get_argument_value(self): return pyon.decode(self.text()) + def set_argument_value(self, value): + self.setText(pyon.encode(value)) + class _BooleanEntry(QtGui.QCheckBox): def __init__(self, procdesc): QtGui.QCheckBox.__init__(self) if "default" in procdesc: - self.setChecked(procdesc["default"]) + self.set_argument_value(procdesc["default"]) def get_argument_value(self): return self.isChecked() + def set_argument_value(self, value): + self.setChecked(value) + class _EnumerationEntry(QtGui.QComboBox): def __init__(self, procdesc): @@ -50,16 +56,15 @@ class _EnumerationEntry(QtGui.QComboBox): self.choices = procdesc["choices"] self.addItems(self.choices) if "default" in procdesc: - try: - idx = self.choices.index(procdesc["default"]) - except: - pass - else: - self.setCurrentIndex(idx) + self.set_argument_value(procdesc["default"]) def get_argument_value(self): return self.choices[self.currentIndex()] + def set_argument_value(self, value): + idx = self.choices.index(value) + self.setCurrentIndex(idx) + class _NumberEntry(QtGui.QDoubleSpinBox): def __init__(self, procdesc): @@ -73,21 +78,27 @@ class _NumberEntry(QtGui.QDoubleSpinBox): if procdesc["unit"]: self.setSuffix(" " + procdesc["unit"]) if 
"default" in procdesc: - force_spinbox_value(self, procdesc["default"]) + self.set_argument_value(procdesc["default"]) def get_argument_value(self): return self.value() + def set_argument_value(self, value): + force_spinbox_value(self, value) + class _StringEntry(QtGui.QLineEdit): def __init__(self, procdesc): QtGui.QLineEdit.__init__(self) if "default" in procdesc: - self.insert(procdesc["default"]) + self.set_argument_value(procdesc["default"]) def get_argument_value(self): return self.text() + def set_argument_value(self, value): + self.setText(value) + _procty_to_entry = { "FreeValue": _FreeValueEntry, @@ -114,21 +125,31 @@ class _ArgumentSetter(LayoutWidget): self.addWidget(entry, n, 1) self._args_to_entries[name] = entry - def get_argument_values(self): + def get_argument_values(self, show_error_message): r = dict() for arg, entry in self._args_to_entries.items(): try: r[arg] = entry.get_argument_value() except: - msgbox = QtGui.QMessageBox(self.dialog_parent) - msgbox.setWindowTitle("Error") - msgbox.setText("Failed to obtain value for argument '{}'.\n{}" - .format(arg, traceback.format_exc())) - msgbox.setStandardButtons(QtGui.QMessageBox.Ok) - msgbox.show() + if show_error_message: + msgbox = QtGui.QMessageBox(self.dialog_parent) + msgbox.setWindowTitle("Error") + msgbox.setText("Failed to obtain value for argument '{}'.\n{}" + .format(arg, traceback.format_exc())) + msgbox.setStandardButtons(QtGui.QMessageBox.Ok) + msgbox.show() return None return r + def set_argument_values(self, arguments, ignore_errors): + for arg, value in arguments.items(): + try: + entry = self._args_to_entries[arg] + entry.set_argument_value(value) + except: + if not ignore_errors: + raise + class ExplorerDock(dockarea.Dock): def __init__(self, dialog_parent, status_bar, schedule_ctl): @@ -163,7 +184,7 @@ class ExplorerDock(dockarea.Dock): grid.addWidget(self.priority, 1, 3) self.pipeline = QtGui.QLineEdit() - self.pipeline.insert("main") + self.pipeline.setText("main") grid.addWidget(QtGui.QLabel("Pipeline:"), 2, 0) grid.addWidget(self.pipeline, 2, 1) @@ -177,8 +198,15 @@ class ExplorerDock(dockarea.Dock): self.argsetter = _ArgumentSetter(self.dialog_parent, []) self.splitter.addWidget(self.argsetter) self.splitter.setSizes([grid.minimumSizeHint().width(), 1000]) + self.state = dict() def update_argsetter(self, selected, deselected): + deselected = deselected.indexes() + if deselected: + row = deselected[0].row() + key = self.explist_model.row_to_key[row] + self.state[key] = self.argsetter.get_argument_values(False) + selected = selected.indexes() if selected: row = selected[0].row() @@ -188,9 +216,24 @@ class ExplorerDock(dockarea.Dock): sizes = self.splitter.sizes() self.argsetter.deleteLater() self.argsetter = _ArgumentSetter(self.dialog_parent, arguments) + if key in self.state: + arguments = self.state[key] + if arguments is not None: + self.argsetter.set_argument_values(arguments, True) self.splitter.insertWidget(1, self.argsetter) self.splitter.setSizes(sizes) + def save_state(self): + idx = self.el.selectedIndexes() + if idx: + row = idx[0].row() + key = self.explist_model.row_to_key[row] + self.state[key] = self.argsetter.get_argument_values(False) + return self.state + + def restore_state(self, state): + self.state = state + def enable_duedate(self): self.datetime_en.setChecked(True) @@ -231,7 +274,7 @@ class ExplorerDock(dockarea.Dock): due_date = self.datetime.dateTime().toMSecsSinceEpoch()/1000 else: due_date = None - arguments = self.argsetter.get_argument_values() + arguments = 
self.argsetter.get_argument_values(True) if arguments is None: return asyncio.async(self.submit(self.pipeline.text(), diff --git a/artiq/gui/scan.py b/artiq/gui/scan.py index 8c319a925..ee97a1401 100644 --- a/artiq/gui/scan.py +++ b/artiq/gui/scan.py @@ -96,20 +96,7 @@ class ScanController(LayoutWidget): b.toggled.connect(self.select_page) if "default" in procdesc: - d = procdesc["default"] - if d["ty"] == "NoScan": - self.noscan.setChecked(True) - force_spinbox_value(self.v_noscan, d["value"]) - elif d["ty"] == "LinearScan": - self.linear.setChecked(True) - self.v_linear.set_values(d["min"], d["max"], d["npoints"]) - elif d["ty"] == "RandomScan": - self.random.setChecked(True) - self.v_random.set_values(d["min"], d["max"], d["npoints"]) - elif d["ty"] == "ExplicitScan": - self.explicit.setChecked(True) - self.v_explicit.insert(" ".join( - [str(x) for x in d["sequence"]])) + self.set_argument_value(procdesc["default"]) else: self.noscan.setChecked(True) @@ -137,3 +124,20 @@ class ScanController(LayoutWidget): elif self.explicit.isChecked(): sequence = [float(x) for x in self.v_explicit.text().split()] return {"ty": "ExplicitScan", "sequence": sequence} + + def set_argument_value(self, d): + if d["ty"] == "NoScan": + self.noscan.setChecked(True) + force_spinbox_value(self.v_noscan, d["value"]) + elif d["ty"] == "LinearScan": + self.linear.setChecked(True) + self.v_linear.set_values(d["min"], d["max"], d["npoints"]) + elif d["ty"] == "RandomScan": + self.random.setChecked(True) + self.v_random.set_values(d["min"], d["max"], d["npoints"]) + elif d["ty"] == "ExplicitScan": + self.explicit.setChecked(True) + self.v_explicit.insert(" ".join( + [str(x) for x in d["sequence"]])) + else: + raise ValueError("Unknown scan type '{}'".format(d["ty"])) From 7180552d24e119d92a027281da41586b67af10e3 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Thu, 6 Aug 2015 15:34:58 +0800 Subject: [PATCH 034/191] gui: support setting histogram X axis --- artiq/gui/displays.py | 117 ++++++++++++++++++++++++++++++------------ 1 file changed, 84 insertions(+), 33 deletions(-) diff --git a/artiq/gui/displays.py b/artiq/gui/displays.py index ca2407871..a6cd4a287 100644 --- a/artiq/gui/displays.py +++ b/artiq/gui/displays.py @@ -5,43 +5,63 @@ import pyqtgraph as pg from pyqtgraph import dockarea -class _SimpleSettings(QtGui.QDialog): - def __init__(self, parent, prev_name, prev_settings, - result_list, create_cb): +class _BaseSettings(QtGui.QDialog): + def __init__(self, parent, window_title, prev_name, create_cb): QtGui.QDialog.__init__(self, parent=parent) - self.setWindowTitle(self._window_title) + self.setWindowTitle(window_title) - grid = QtGui.QGridLayout() - self.setLayout(grid) + self.grid = QtGui.QGridLayout() + self.setLayout(self.grid) - grid.addWidget(QtGui.QLabel("Name:"), 0, 0) - self.name = name = QtGui.QLineEdit() - grid.addWidget(name, 0, 1) + self.grid.addWidget(QtGui.QLabel("Name:"), 0, 0) + self.name = QtGui.QLineEdit() + self.grid.addWidget(self.name, 0, 1) if prev_name is not None: - name.insert(prev_name) + self.name.setText(prev_name) - grid.addWidget(QtGui.QLabel("Result:")) - self.result = result = QtGui.QComboBox() - grid.addWidget(result, 1, 1) - result.addItems(result_list) - result.setEditable(True) - if "result" in prev_settings: - result.setEditText(prev_settings["result"]) + def on_accept(): + create_cb(self.name.text(), self.get_input()) + self.accepted.connect(on_accept) + def add_buttons(self): buttons = QtGui.QDialogButtonBox( QtGui.QDialogButtonBox.Ok | 
QtGui.QDialogButtonBox.Cancel) - grid.addWidget(buttons, 2, 0, 1, 2) + self.grid.addWidget(buttons, self.grid.rowCount(), 0, 1, 2) buttons.accepted.connect(self.accept) buttons.rejected.connect(self.reject) - def on_accept(): - create_cb(name.text(), {"result": result.currentText()}) - self.accepted.connect(on_accept) - def accept(self): - if self.name.text() and self.result.currentText(): + if self.name.text() and self.validate_input(): QtGui.QDialog.accept(self) + def validate_input(self): + raise NotImplementedError + + def get_input(self): + raise NotImplementedError + + +class _SimpleSettings(_BaseSettings): + def __init__(self, parent, prev_name, prev_settings, + result_list, create_cb): + _BaseSettings.__init__(self, parent, self._window_title, + prev_name, create_cb) + + self.grid.addWidget(QtGui.QLabel("Result:")) + self.result = QtGui.QComboBox() + self.grid.addWidget(self.result, 1, 1) + self.result.addItems(result_list) + self.result.setEditable(True) + if "result" in prev_settings: + self.result.setEditText(prev_settings["result"]) + self.add_buttons() + + def validate_input(self): + return bool(self.result.currentText()) + + def get_input(self): + return {"result": self.result.currentText()} + class NumberDisplaySettings(_SimpleSettings): _window_title = "Number display" @@ -95,8 +115,30 @@ class XYDisplay(dockarea.Dock): self.plot.plot(y) -class HistogramDisplaySettings(_SimpleSettings): - _window_title = "Histogram" +class HistogramDisplaySettings(_BaseSettings): + def __init__(self, parent, prev_name, prev_settings, + result_list, create_cb): + _BaseSettings.__init__(self, parent, "Histogram", + prev_name, create_cb) + + for row, axis in enumerate("yx"): + self.grid.addWidget(QtGui.QLabel(axis.upper() + ":")) + w = QtGui.QComboBox() + self.grid.addWidget(w, row + 1, 1) + if axis == "x": + w.addItem("") + w.addItems(result_list) + w.setEditable(True) + if axis in prev_settings: + w.setEditText(prev_settings["y"]) + setattr(self, axis, w) + self.add_buttons() + + def validate_input(self): + return bool(self.y.currentText()) and bool(self.x.currentText()) + + def get_input(self): + return {"y": self.y.currentText(), "x": self.x.currentText()} class HistogramDisplay(dockarea.Dock): @@ -108,19 +150,28 @@ class HistogramDisplay(dockarea.Dock): self.addWidget(self.plot) def data_sources(self): - return {self.settings["result"]} + s = {self.settings["y"]} + if self.settings["x"] != "": + s.add(self.settings["x"]) + return s def update_data(self, data): - result = self.settings["result"] + result_y = self.settings["y"] + result_x = self.settings["x"] try: - y = data[result] + y = data[result_y] + if result_x == "": + x = None + else: + x = data[result_x] except KeyError: return - x = list(range(len(y)+1)) - self.plot.clear() - if not y: - return - self.plot.plot(x, y, stepMode=True, fillLevel=0, brush=(0, 0, 255, 150)) + if x is None: + x = list(range(len(y)+1)) + + if y and len(x) == len(y) + 1: + self.plot.clear() + self.plot.plot(x, y, stepMode=True, fillLevel=0, brush=(0, 0, 255, 150)) display_types = OrderedDict([ From 263ff86e66c858b5b753cae65c2d7652c80f4416 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Thu, 6 Aug 2015 18:02:30 +0800 Subject: [PATCH 035/191] gui: support X axis, fit, error bars in XY plot --- artiq/gui/displays.py | 98 ++++++++++++++++++++++++++----------------- 1 file changed, 59 insertions(+), 39 deletions(-) diff --git a/artiq/gui/displays.py b/artiq/gui/displays.py index a6cd4a287..512159c7e 100644 --- a/artiq/gui/displays.py +++ 
b/artiq/gui/displays.py @@ -1,4 +1,5 @@ from collections import OrderedDict +import numpy as np from quamash import QtGui import pyqtgraph as pg @@ -47,24 +48,30 @@ class _SimpleSettings(_BaseSettings): _BaseSettings.__init__(self, parent, self._window_title, prev_name, create_cb) - self.grid.addWidget(QtGui.QLabel("Result:")) - self.result = QtGui.QComboBox() - self.grid.addWidget(self.result, 1, 1) - self.result.addItems(result_list) - self.result.setEditable(True) - if "result" in prev_settings: - self.result.setEditText(prev_settings["result"]) + self.result_widgets = dict() + for row, (has_none, key) in enumerate(self._result_keys): + self.grid.addWidget(QtGui.QLabel(key.capitalize() + ":")) + w = QtGui.QComboBox() + self.grid.addWidget(w, row + 1, 1) + if has_none: + w.addItem("") + w.addItems(result_list) + w.setEditable(True) + if key in prev_settings: + w.setEditText(prev_settings[key]) + self.result_widgets[key] = w self.add_buttons() def validate_input(self): - return bool(self.result.currentText()) + return all(w.currentText() for w in self.result_widgets.values()) def get_input(self): - return {"result": self.result.currentText()} + return {k: v.currentText() for k, v in self.result_widgets.items()} class NumberDisplaySettings(_SimpleSettings): _window_title = "Number display" + _result_keys = [(False, "result")] class NumberDisplay(dockarea.Dock): @@ -90,6 +97,7 @@ class NumberDisplay(dockarea.Dock): class XYDisplaySettings(_SimpleSettings): _window_title = "XY plot" + _result_keys = [(False, "y"), (True, "x"), (True, "error"), (True, "fit")] class XYDisplay(dockarea.Dock): @@ -101,44 +109,56 @@ class XYDisplay(dockarea.Dock): self.addWidget(self.plot) def data_sources(self): - return {self.settings["result"]} + s = {self.settings["y"]} + for k in "x", "error", "fit": + if self.settings[k] != "": + s.add(self.settings[k]) + return s def update_data(self, data): - result = self.settings["result"] + result_y = self.settings["y"] + result_x = self.settings["x"] + result_error = self.settings["error"] + result_fit = self.settings["fit"] + try: - y = data[result] + y = data[result_y] except KeyError: return - self.plot.clear() - if not y: + x = data.get(result_x, None) + if x is None: + x = list(range(len(y))) + error = data.get(result_error, None) + fit = data.get(result_fit, None) + + if not y or len(y) != len(x): return - self.plot.plot(y) + if error is not None and hasattr(error, "__len__"): + if not len(error): + error = None + elif len(error) != len(y): + return + if fit is not None: + if not len(fit): + fit = None + elif len(fit) != len(y): + return + + self.plot.clear() + self.plot.plot(x, y, pen=None, symbol="x") + if error is not None: + # See https://github.com/pyqtgraph/pyqtgraph/issues/211 + if hasattr(error, "__len__") and not isinstance(error, np.ndarray): + error = np.array(error) + errbars = pg.ErrorBarItem(x=np.array(x), y=np.array(y), height=error) + self.plot.addItem(errbars) + if fit is not None: + self.plot.plot(x, fit) -class HistogramDisplaySettings(_BaseSettings): - def __init__(self, parent, prev_name, prev_settings, - result_list, create_cb): - _BaseSettings.__init__(self, parent, "Histogram", - prev_name, create_cb) - - for row, axis in enumerate("yx"): - self.grid.addWidget(QtGui.QLabel(axis.upper() + ":")) - w = QtGui.QComboBox() - self.grid.addWidget(w, row + 1, 1) - if axis == "x": - w.addItem("") - w.addItems(result_list) - w.setEditable(True) - if axis in prev_settings: - w.setEditText(prev_settings["y"]) - setattr(self, axis, w) - 
self.add_buttons() - - def validate_input(self): - return bool(self.y.currentText()) and bool(self.x.currentText()) - - def get_input(self): - return {"y": self.y.currentText(), "x": self.x.currentText()} +class HistogramDisplaySettings(_SimpleSettings): + _window_title = "Histogram" + _result_keys = [(False, "y"), (True, "x")] class HistogramDisplay(dockarea.Dock): From fd795724f2f50b3977a83f5c150e803cf5193394 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Thu, 6 Aug 2015 18:03:05 +0800 Subject: [PATCH 036/191] gui: better display of None --- artiq/gui/tools.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/artiq/gui/tools.py b/artiq/gui/tools.py index f388521d8..ac003cf9c 100644 --- a/artiq/gui/tools.py +++ b/artiq/gui/tools.py @@ -10,6 +10,8 @@ def force_spinbox_value(spinbox, value): def short_format(v): + if v is None: + return "None" t = type(v) if t is int or t is float: return str(v) From 59a29ae04b184d1a43b6a43ad9c7b89931bb01b5 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Thu, 6 Aug 2015 18:23:09 +0800 Subject: [PATCH 037/191] language/environment: remove attr_rtresult This function has the typical side effect of clearing the result. When happening in build(), this caused minor trouble when using pipelined scheduling (no result displayed at all until run() proceeds). --- artiq/language/environment.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/artiq/language/environment.py b/artiq/language/environment.py index f6da67803..2d567312f 100644 --- a/artiq/language/environment.py +++ b/artiq/language/environment.py @@ -223,12 +223,6 @@ class HasEnvironment: raise ValueError("Result is already realtime") self.__rdb.nrt[key] = value - def attr_rtresult(self, key, init_value): - """Writes the value of a real-time result and sets the corresponding - ``Notifier`` as attribute. The names of the result and of the - attribute are the same.""" - setattr(self, key, set_result(key, init_value, True)) - def get_result(self, key): """Retrieves the value of a result. From 9261254653248cbd2bc0c1c1ec800343a61b2671 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Thu, 6 Aug 2015 18:43:27 +0800 Subject: [PATCH 038/191] language/environment: support non-stored results --- artiq/language/environment.py | 6 +++++- artiq/master/worker_db.py | 13 +++++++++++-- 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/artiq/language/environment.py b/artiq/language/environment.py index 2d567312f..1132e7f72 100644 --- a/artiq/language/environment.py +++ b/artiq/language/environment.py @@ -201,13 +201,15 @@ class HasEnvironment: raise ValueError("Parameter database not present") self.__pdb.set(key, value) - def set_result(self, key, value, realtime=False): + def set_result(self, key, value, realtime=False, store=True): """Writes the value of a result. :param realtime: Marks the result as real-time, making it immediately available to clients such as the user interface. Returns a ``Notifier`` instance that can be used to modify mutable results (such as lists) and synchronize the modifications with the clients. + :param store: Defines if the result should be stored permanently, + e.g. in HDF5 output. Default is to store. 
""" if self.__rdb is None: raise ValueError("Result database not present") @@ -217,11 +219,13 @@ class HasEnvironment: self.__rdb.rt[key] = value notifier = self.__rdb.rt[key] notifier.kernel_attr_init = False + self.__rdb.set_store(key, store) return notifier else: if key in self.__rdb.rt.read: raise ValueError("Result is already realtime") self.__rdb.nrt[key] = value + self.__rdb.set_store(key, store) def get_result(self, key): """Retrieves the value of a result. diff --git a/artiq/master/worker_db.py b/artiq/master/worker_db.py index 0da07dcf7..a4664415a 100644 --- a/artiq/master/worker_db.py +++ b/artiq/master/worker_db.py @@ -91,6 +91,7 @@ class ResultDB: def __init__(self): self.rt = Notifier(dict()) self.nrt = dict() + self.store = set() def get(self, key): try: @@ -98,9 +99,17 @@ class ResultDB: except KeyError: return self.rt[key].read + def set_store(self, key, store): + if store: + self.store.add(key) + else: + self.store.discard(key) + def write_hdf5(self, f): - result_dict_to_hdf5(f, self.rt.read) - result_dict_to_hdf5(f, self.nrt) + result_dict_to_hdf5( + f, {k: v for k, v in self.rt.read.items() if k in self.store}) + result_dict_to_hdf5( + f, {k: v for k, v in self.nrt.items() if k in self.store}) def _create_device(desc, dmgr): From 1f5a49d263f3ba6f36e95aa92d644cdd044a9c35 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Thu, 6 Aug 2015 18:43:54 +0800 Subject: [PATCH 039/191] examples/flopping_f_simulation: demonstrate previous functionality --- .../repository/flopping_f_simulation.py | 23 ++++++++++++++----- 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/examples/master/repository/flopping_f_simulation.py b/examples/master/repository/flopping_f_simulation.py index 5efe1d8e2..9512e4bdd 100644 --- a/examples/master/repository/flopping_f_simulation.py +++ b/examples/master/repository/flopping_f_simulation.py @@ -31,14 +31,18 @@ class FloppingF(EnvExperiment): default=LinearScan(1000, 2000, 100))) self.attr_argument("F0", NumberValue(1500, min=1000, max=2000)) - self.attr_argument("noise_amplitude", NumberValue(0.1, min=0, max=100)) - - self.frequency = self.set_result("flopping_f_frequency", [], True) - self.brightness = self.set_result("flopping_f_brightness", [], True) + self.attr_argument("noise_amplitude", NumberValue(0.1, min=0, max=100, + step=0.01)) self.attr_device("scheduler") def run(self): + self.frequency = self.set_result("flopping_f_frequency", [], + realtime=True, store=False) + self.brightness = self.set_result("flopping_f_brightness", [], + realtime=True) + self.set_result("flopping_f_fit", [], realtime=True, store=False) + for frequency in self.frequency_scan: brightness = model(frequency, self.F0) + self.noise_amplitude*random.random() self.frequency.append(frequency) @@ -48,9 +52,16 @@ class FloppingF(EnvExperiment): self.scheduler.priority, time.time() + 20, False) def analyze(self): + # Use get_result so that analyze can be run stand-alone. 
+ frequency = self.get_result("flopping_f_frequency") + brightness = self.get_result("flopping_f_brightness") popt, pcov = curve_fit(model_numpy, - self.frequency.read, self.brightness.read, + frequency, brightness, p0=[self.get_parameter("flopping_freq")]) perr = np.sqrt(np.diag(pcov)) if perr < 0.1: - self.set_parameter("flopping_freq", float(popt)) + F0 = float(popt) + self.set_parameter("flopping_freq", F0) + self.set_result("flopping_f_fit", + [model(x, F0) for x in frequency], + realtime=True, store=False) From 705ec6ba047fee51e286df0779dc6673d46b2709 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Thu, 6 Aug 2015 18:53:14 +0800 Subject: [PATCH 040/191] examples/flopping_f_simulation: do not setattr needlessly --- .../master/repository/flopping_f_simulation.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/examples/master/repository/flopping_f_simulation.py b/examples/master/repository/flopping_f_simulation.py index 9512e4bdd..f0b4f4461 100644 --- a/examples/master/repository/flopping_f_simulation.py +++ b/examples/master/repository/flopping_f_simulation.py @@ -37,16 +37,16 @@ class FloppingF(EnvExperiment): self.attr_device("scheduler") def run(self): - self.frequency = self.set_result("flopping_f_frequency", [], - realtime=True, store=False) - self.brightness = self.set_result("flopping_f_brightness", [], - realtime=True) + frequency = self.set_result("flopping_f_frequency", [], + realtime=True, store=False) + brightness = self.set_result("flopping_f_brightness", [], + realtime=True) self.set_result("flopping_f_fit", [], realtime=True, store=False) - for frequency in self.frequency_scan: - brightness = model(frequency, self.F0) + self.noise_amplitude*random.random() - self.frequency.append(frequency) - self.brightness.append(brightness) + for f in self.frequency_scan: + m_brightness = model(f, self.F0) + self.noise_amplitude*random.random() + frequency.append(f) + brightness.append(m_brightness) time.sleep(0.1) self.scheduler.submit(self.scheduler.pipeline_name, self.scheduler.expid, self.scheduler.priority, time.time() + 20, False) From e078dabd3c6b4621e1e83ee0720a8464b3ead66e Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Thu, 6 Aug 2015 22:14:49 +0800 Subject: [PATCH 041/191] tools/TaskObject: do not suppress exceptions raised by terminating task --- artiq/tools.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/artiq/tools.py b/artiq/tools.py index 767e858dd..c6db084a9 100644 --- a/artiq/tools.py +++ b/artiq/tools.py @@ -128,7 +128,10 @@ class TaskObject: @asyncio.coroutine def stop(self): self.task.cancel() - yield from asyncio.wait([self.task]) + try: + yield from asyncio.wait_for(self.task, None) + except asyncio.CancelledError: + pass del self.task @asyncio.coroutine From 968760d48facc7a1eb970355410d288144ebfe6a Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Thu, 6 Aug 2015 22:27:46 +0800 Subject: [PATCH 042/191] gui: save geometry of main window --- artiq/frontend/artiq_gui.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/artiq/frontend/artiq_gui.py b/artiq/frontend/artiq_gui.py index 7607924c7..39a0f1142 100755 --- a/artiq/frontend/artiq_gui.py +++ b/artiq/frontend/artiq_gui.py @@ -7,7 +7,7 @@ import os # Quamash must be imported first so that pyqtgraph picks up the Qt binding # it has chosen. 
-from quamash import QEventLoop, QtGui +from quamash import QEventLoop, QtGui, QtCore from pyqtgraph import dockarea from artiq.tools import verbosity_args, init_logger @@ -44,17 +44,23 @@ def get_argparser(): return parser -class _MainWindow(QtGui.QMainWindow): +class MainWindow(QtGui.QMainWindow): def __init__(self, app): QtGui.QMainWindow.__init__(self) self.setWindowIcon(QtGui.QIcon(os.path.join(data_dir, "icon.png"))) - self.resize(1400, 800) + #self.resize(1400, 800) self.setWindowTitle("ARTIQ") self.exit_request = asyncio.Event() def closeEvent(self, *args): self.exit_request.set() + def save_state(self): + return bytes(self.saveGeometry()) + + def restore_state(self, state): + self.restoreGeometry(QtCore.QByteArray(state)) + def main(): args = get_argparser().parse_args() @@ -72,9 +78,10 @@ def main(): args.server, args.port_control, "master_schedule")) atexit.register(lambda: schedule_ctl.close_rpc()) - win = _MainWindow(app) + win = MainWindow(app) area = dockarea.DockArea() smgr.register(area) + smgr.register(win) win.setCentralWidget(area) status_bar = QtGui.QStatusBar() status_bar.showMessage("Connected to {}".format(args.server)) From 7ed8fe57fa197f5f5e3190d8f89c552c7908ce9e Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Fri, 7 Aug 2015 15:51:56 +0800 Subject: [PATCH 043/191] Git support --- artiq/frontend/artiq_client.py | 25 +++++++--- artiq/frontend/artiq_master.py | 21 ++++++-- artiq/gui/explorer.py | 1 + artiq/gui/schedule.py | 10 +++- artiq/master/repository.py | 88 +++++++++++++++++++++++++++++----- artiq/master/scheduler.py | 34 +++++++++---- artiq/master/worker.py | 3 +- artiq/master/worker_impl.py | 15 ++++-- artiq/test/scheduler.py | 6 +-- artiq/test/worker.py | 2 +- setup.py | 5 +- 11 files changed, 167 insertions(+), 43 deletions(-) diff --git a/artiq/frontend/artiq_client.py b/artiq/frontend/artiq_client.py index 8277ba318..5650e5f09 100755 --- a/artiq/frontend/artiq_client.py +++ b/artiq/frontend/artiq_client.py @@ -42,6 +42,12 @@ def get_argparser(): parser_add.add_argument("-f", "--flush", default=False, action="store_true", help="flush the pipeline before preparing " "the experiment") + parser_add.add_argument("-R", "--repository", default=False, + action="store_true", + help="use the experiment repository") + parser_add.add_argument("-r", "--revision", default=None, + help="use a specific repository revision " + "(defaults to head, ignored without -R)") parser_add.add_argument("-c", "--class-name", default=None, help="name of the class to run") parser_add.add_argument("file", @@ -81,8 +87,8 @@ def get_argparser(): "what", help="select object to show: schedule/devices/parameters") - parser_scan_repository = subparsers.add_parser( - "scan-repository", help="rescan repository") + subparsers.add_parser("scan-repository", + help="trigger a repository rescan") return parser @@ -107,6 +113,8 @@ def _action_submit(remote, args): "class_name": args.class_name, "arguments": arguments, } + if args.repository: + expid["repo_rev"] = args.revision if args.timed is None: due_date = None else: @@ -148,7 +156,7 @@ def _show_schedule(schedule): x[1]["due_date"] or 0, x[0])) table = PrettyTable(["RID", "Pipeline", " Status ", "Prio", - "Due date", "File", "Class name"]) + "Due date", "Revision", "File", "Class name"]) for rid, v in l: row = [rid, v["pipeline"], v["status"], v["priority"]] if v["due_date"] is None: @@ -156,11 +164,16 @@ def _show_schedule(schedule): else: row.append(time.strftime("%m/%d %H:%M:%S", time.localtime(v["due_date"]))) - 
row.append(v["expid"]["file"]) - if v["expid"]["class_name"] is None: + expid = v["expid"] + if "repo_rev" in expid: + row.append(expid["repo_rev"]) + else: + row.append("Outside repo.") + row.append(expid["file"]) + if expid["class_name"] is None: row.append("") else: - row.append(v["expid"]["class_name"]) + row.append(expid["class_name"]) table.add_row(row) print(table) else: diff --git a/artiq/frontend/artiq_master.py b/artiq/frontend/artiq_master.py index 01c3fb081..939b67659 100755 --- a/artiq/frontend/artiq_master.py +++ b/artiq/frontend/artiq_master.py @@ -10,7 +10,7 @@ from artiq.protocols.sync_struct import Notifier, Publisher, process_mod from artiq.protocols.file_db import FlatFileDB from artiq.master.scheduler import Scheduler from artiq.master.worker_db import get_last_rid -from artiq.master.repository import Repository +from artiq.master.repository import FilesystemBackend, GitBackend, Repository from artiq.tools import verbosity_args, init_logger @@ -26,6 +26,13 @@ def get_argparser(): group.add_argument( "--port-control", default=3251, type=int, help="TCP port to listen to for control (default: %(default)d)") + group = parser.add_argument_group("repository") + group.add_argument( + "-g", "--git", default=False, action="store_true", + help="use the Git repository backend") + group.add_argument( + "-r", "--repository", default="repository", + help="path to the repository (default: '%(default)s')") verbosity_args(parser) return parser @@ -57,6 +64,13 @@ def main(): rtr = Notifier(dict()) log = Log(1000) + if args.git: + repo_backend = GitBackend(args.repository) + else: + repo_backend = FilesystemBackend(args.repository) + repository = Repository(repo_backend, log.log) + repository.scan_async() + worker_handlers = { "get_device": ddb.get, "get_parameter": pdb.get, @@ -64,14 +78,11 @@ def main(): "update_rt_results": lambda mod: process_mod(rtr, mod), "log": log.log } - scheduler = Scheduler(get_last_rid() + 1, worker_handlers) + scheduler = Scheduler(get_last_rid() + 1, worker_handlers, repo_backend) worker_handlers["scheduler_submit"] = scheduler.submit scheduler.start() atexit.register(lambda: loop.run_until_complete(scheduler.stop())) - repository = Repository(log.log) - repository.scan_async() - server_control = Server({ "master_ddb": ddb, "master_pdb": pdb, diff --git a/artiq/gui/explorer.py b/artiq/gui/explorer.py index 4a4bcfe99..4412320a6 100644 --- a/artiq/gui/explorer.py +++ b/artiq/gui/explorer.py @@ -256,6 +256,7 @@ class ExplorerDock(dockarea.Dock): def submit(self, pipeline_name, file, class_name, arguments, priority, due_date, flush): expid = { + "repo_rev": None, "file": file, "class_name": class_name, "arguments": arguments, diff --git a/artiq/gui/schedule.py b/artiq/gui/schedule.py index 65bcdc0cb..69324e07a 100644 --- a/artiq/gui/schedule.py +++ b/artiq/gui/schedule.py @@ -12,7 +12,7 @@ class _ScheduleModel(DictSyncModel): def __init__(self, parent, init): DictSyncModel.__init__(self, ["RID", "Pipeline", "Status", "Prio", "Due date", - "File", "Class name"], + "Revision", "File", "Class name"], parent, init) def sort_key(self, k, v): @@ -35,8 +35,14 @@ class _ScheduleModel(DictSyncModel): return time.strftime("%m/%d %H:%M:%S", time.localtime(v["due_date"])) elif column == 5: - return v["expid"]["file"] + expid = v["expid"] + if "repo_rev" in expid: + return expid["repo_rev"] + else: + return "Outside repo." 
elif column == 6: + return v["expid"]["file"] + elif column == 7: if v["expid"]["class_name"] is None: return "" else: diff --git a/artiq/master/repository.py b/artiq/master/repository.py index 465ce6f85..12e3f8a68 100644 --- a/artiq/master/repository.py +++ b/artiq/master/repository.py @@ -1,6 +1,8 @@ -import os -import logging import asyncio +import os +import tempfile +import shutil +import logging from artiq.protocols.sync_struct import Notifier from artiq.master.worker import Worker @@ -10,15 +12,14 @@ logger = logging.getLogger(__name__) @asyncio.coroutine -def _scan_experiments(log): +def _scan_experiments(wd, log): r = dict() - for f in os.listdir("repository"): + for f in os.listdir(wd): if f.endswith(".py"): try: - full_name = os.path.join("repository", f) worker = Worker({"log": lambda message: log("scan", message)}) try: - description = yield from worker.examine(full_name) + description = yield from worker.examine(os.path.join(wd, f)) finally: yield from worker.close() for class_name, class_desc in description.items(): @@ -32,7 +33,7 @@ def _scan_experiments(log): name = basename + str(i) i += 1 entry = { - "file": full_name, + "file": f, "class_name": class_name, "arguments": arguments } @@ -52,19 +53,84 @@ def _sync_explist(target, source): class Repository: - def __init__(self, log_fn): - self.explist = Notifier(dict()) - self._scanning = False + def __init__(self, backend, log_fn): + self.backend = backend self.log_fn = log_fn + self.head_rev = self.backend.get_head_rev() + self.backend.request_rev(self.head_rev) + self.explist = Notifier(dict()) + + self._scanning = False + @asyncio.coroutine def scan(self): if self._scanning: return self._scanning = True - new_explist = yield from _scan_experiments(self.log_fn) + + new_head_rev = self.backend.get_head_rev() + wd = self.backend.request_rev(new_head_rev) + self.backend.release_rev(self.head_rev) + self.head_rev = new_head_rev + new_explist = yield from _scan_experiments(wd, self.log_fn) + _sync_explist(self.explist, new_explist) self._scanning = False def scan_async(self): asyncio.async(self.scan()) + + +class FilesystemBackend: + def __init__(self, root): + self.root = os.path.abspath(root) + + def get_head_rev(self): + return "N/A" + + def request_rev(self, rev): + return self.root + + def release_rev(self, rev): + pass + + +class _GitCheckout: + def __init__(self, git, rev): + self.path = tempfile.mkdtemp() + git.checkout_tree(git.get(rev), directory=self.path) + self.ref_count = 1 + logger.info("checked out revision %s into %s", rev, self.path) + + def dispose(self): + logger.info("disposing of checkout in folder %s", self.path) + shutil.rmtree(self.path) + + +class GitBackend: + def __init__(self, root): + # lazy import - make dependency optional + import pygit2 + + self.git = pygit2.Repository(root) + self.checkouts = dict() + + def get_head_rev(self): + return str(self.git.head.target) + + def request_rev(self, rev): + if rev in self.checkouts: + co = self.checkouts[rev] + co.ref_count += 1 + else: + co = _GitCheckout(self.git, rev) + self.checkouts[rev] = co + return co.path + + def release_rev(self, rev): + co = self.checkouts[rev] + co.ref_count -= 1 + if not co.ref_count: + co.dispose() + del self.checkouts[rev] diff --git a/artiq/master/scheduler.py b/artiq/master/scheduler.py index 93afb0508..56751a926 100644 --- a/artiq/master/scheduler.py +++ b/artiq/master/scheduler.py @@ -47,11 +47,12 @@ def _mk_worker_method(name): class Run: def __init__(self, rid, pipeline_name, - expid, priority, due_date, flush, + wd, 
expid, priority, due_date, flush, worker_handlers, notifier): # called through pool self.rid = rid self.pipeline_name = pipeline_name + self.wd = wd self.expid = expid self.priority = priority self.due_date = due_date @@ -103,7 +104,8 @@ class Run: @asyncio.coroutine def build(self): - yield from self._build(self.rid, self.pipeline_name, self.expid, + yield from self._build(self.rid, self.pipeline_name, + self.wd, self.expid, self.priority) prepare = _mk_worker_method("prepare") @@ -124,18 +126,26 @@ class RIDCounter: class RunPool: - def __init__(self, ridc, worker_handlers, notifier): + def __init__(self, ridc, worker_handlers, notifier, repo_backend): self.runs = dict() self.submitted_cb = None self._ridc = ridc self._worker_handlers = worker_handlers self._notifier = notifier + self._repo_backend = repo_backend def submit(self, expid, priority, due_date, flush, pipeline_name): + # mutates expid to insert head repository revision if None # called through scheduler rid = self._ridc.get() - run = Run(rid, pipeline_name, expid, priority, due_date, flush, + if "repo_rev" in expid: + if expid["repo_rev"] is None: + expid["repo_rev"] = self._repo_backend.get_head_rev() + wd = self._repo_backend.request_rev(expid["repo_rev"]) + else: + wd = None + run = Run(rid, pipeline_name, wd, expid, priority, due_date, flush, self._worker_handlers, self._notifier) self.runs[rid] = run if self.submitted_cb is not None: @@ -147,7 +157,10 @@ class RunPool: # called through deleter if rid not in self.runs: return - yield from self.runs[rid].close() + run = self.runs[rid] + yield from run.close() + if "repo_rev" in run.expid: + self._repo_backend.release_rev(run.expid["repo_rev"]) del self.runs[rid] @@ -280,12 +293,12 @@ class AnalyzeStage(TaskObject): class Pipeline: - def __init__(self, ridc, deleter, worker_handlers, notifier): + def __init__(self, ridc, deleter, worker_handlers, notifier, repo_backend): flush_tracker = WaitSet() def delete_cb(rid): deleter.delete(rid) flush_tracker.discard(rid) - self.pool = RunPool(ridc, worker_handlers, notifier) + self.pool = RunPool(ridc, worker_handlers, notifier, repo_backend) self._prepare = PrepareStage(flush_tracker, delete_cb, self.pool, asyncio.Queue(maxsize=1)) self._run = RunStage(delete_cb, @@ -348,11 +361,12 @@ class Deleter(TaskObject): class Scheduler: - def __init__(self, next_rid, worker_handlers): + def __init__(self, next_rid, worker_handlers, repo_backend): self.notifier = Notifier(dict()) self._pipelines = dict() self._worker_handlers = worker_handlers + self._repo_backend = repo_backend self._terminated = False self._ridc = RIDCounter(next_rid) @@ -374,6 +388,7 @@ class Scheduler: logger.warning("some pipelines were not garbage-collected") def submit(self, pipeline_name, expid, priority, due_date, flush): + # mutates expid to insert head repository revision if None if self._terminated: return try: @@ -381,7 +396,8 @@ class Scheduler: except KeyError: logger.debug("creating pipeline '%s'", pipeline_name) pipeline = Pipeline(self._ridc, self._deleter, - self._worker_handlers, self.notifier) + self._worker_handlers, self.notifier, + self._repo_backend) self._pipelines[pipeline_name] = pipeline pipeline.start() return pipeline.pool.submit(expid, priority, due_date, flush, pipeline_name) diff --git a/artiq/master/worker.py b/artiq/master/worker.py index 919906ca2..100b4e4ee 100644 --- a/artiq/master/worker.py +++ b/artiq/master/worker.py @@ -209,13 +209,14 @@ class Worker: return completed @asyncio.coroutine - def build(self, rid, pipeline_name, expid, 
priority, timeout=15.0): + def build(self, rid, pipeline_name, wd, expid, priority, timeout=15.0): self.rid = rid yield from self._create_process() yield from self._worker_action( {"action": "build", "rid": rid, "pipeline_name": pipeline_name, + "wd": wd, "expid": expid, "priority": priority}, timeout) diff --git a/artiq/master/worker_impl.py b/artiq/master/worker_impl.py index c0e10fe45..6fea52513 100644 --- a/artiq/master/worker_impl.py +++ b/artiq/master/worker_impl.py @@ -1,5 +1,6 @@ import sys import time +import os from artiq.protocols import pyon from artiq.tools import file_import @@ -44,8 +45,6 @@ def make_parent_action(action, argnames, exception=ParentActionError): return parent_action - - class LogForwarder: def __init__(self): self.buffer = "" @@ -175,7 +174,12 @@ def main(): start_time = time.localtime() rid = obj["rid"] expid = obj["expid"] - exp = get_exp(expid["file"], expid["class_name"]) + if obj["wd"] is not None: + # Using repository + expf = os.path.join(obj["wd"], expid["file"]) + else: + expf = expid["file"] + exp = get_exp(expf, expid["class_name"]) dmgr.virtual_devices["scheduler"].set_run_info( obj["pipeline_name"], expid, obj["priority"]) exp_inst = exp(dmgr, ParentPDB, rdb, @@ -194,6 +198,11 @@ def main(): f = get_hdf5_output(start_time, rid, exp.__name__) try: rdb.write_hdf5(f) + if "repo_rev" in expid: + rr = expid["repo_rev"] + dtype = "S{}".format(len(rr)) + dataset = f.create_dataset("repo_rev", (), dtype) + dataset[()] = rr.encode() finally: f.close() put_object({"action": "completed"}) diff --git a/artiq/test/scheduler.py b/artiq/test/scheduler.py index 9c1b2717e..60bbd83c3 100644 --- a/artiq/test/scheduler.py +++ b/artiq/test/scheduler.py @@ -67,7 +67,7 @@ class SchedulerCase(unittest.TestCase): def test_steps(self): loop = self.loop - scheduler = Scheduler(0, _handlers) + scheduler = Scheduler(0, _handlers, None) expid = _get_expid("EmptyExperiment") expect = _get_basic_steps(1, expid) @@ -102,7 +102,7 @@ class SchedulerCase(unittest.TestCase): def test_pause(self): loop = self.loop - scheduler = Scheduler(0, _handlers) + scheduler = Scheduler(0, _handlers, None) expid_bg = _get_expid("BackgroundExperiment") expid = _get_expid("EmptyExperiment") @@ -133,7 +133,7 @@ class SchedulerCase(unittest.TestCase): def test_flush(self): loop = self.loop - scheduler = Scheduler(0, _handlers) + scheduler = Scheduler(0, _handlers, None) expid = _get_expid("EmptyExperiment") expect = _get_basic_steps(1, expid, 1, True) diff --git a/artiq/test/worker.py b/artiq/test/worker.py index b40e7b6c8..abb321e27 100644 --- a/artiq/test/worker.py +++ b/artiq/test/worker.py @@ -38,7 +38,7 @@ class WatchdogTimeoutInBuild(EnvExperiment): @asyncio.coroutine def _call_worker(worker, expid): try: - yield from worker.build(0, "main", expid, 0) + yield from worker.build(0, "main", None, expid, 0) yield from worker.prepare() yield from worker.run() yield from worker.analyze() diff --git a/setup.py b/setup.py index 1936af587..763320836 100755 --- a/setup.py +++ b/setup.py @@ -4,6 +4,7 @@ from setuptools import setup, find_packages, Command import sys import os + if sys.version_info[:3] < (3, 4, 3): raise Exception("You need at least Python 3.4.3 to run ARTIQ") @@ -20,7 +21,7 @@ class PushDocCommand(Command): requirements = [ "sphinx", "sphinx-argparse", "pyserial", "numpy", "scipy", "python-dateutil", "prettytable", "h5py", "pydaqmx", "pyelftools", - "quamash", "pyqtgraph", "llvmlite_artiq" + "quamash", "pyqtgraph", "llvmlite_artiq", "pygit2" ] scripts = [ @@ -63,5 +64,5 @@ setup( 
entry_points={ "console_scripts": scripts, }, - cmdclass={"push_doc":PushDocCommand} + cmdclass={"push_doc": PushDocCommand} ) From c6e5eaa73a81a1bec8910a2ad4adf1253759a970 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Fri, 7 Aug 2015 20:48:48 +0800 Subject: [PATCH 044/191] doc/manual: fix confusing board config instructions --- doc/manual/installing.rst | 53 +++++++++++++++++++++------------------ 1 file changed, 28 insertions(+), 25 deletions(-) diff --git a/doc/manual/installing.rst b/doc/manual/installing.rst index 985acd80f..55400c3d3 100644 --- a/doc/manual/installing.rst +++ b/doc/manual/installing.rst @@ -70,8 +70,6 @@ Next step (for KC705) is to flash MAC and IP addresses to the board: Installing from source ---------------------- -You can skip the first two steps if you already installed from conda. - Preparing the build environment for the core device ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -229,6 +227,34 @@ These steps are required to generate bitstream (``.bit``) files, build the MiSoC The communication parameters are 115200 8-N-1. +Installing the host-side software +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +* Install the llvmlite Python bindings: :: + + $ cd ~/artiq-dev + $ git clone https://github.com/m-labs/llvmlite + $ git checkout artiq + $ cd llvmlite + $ LLVM_CONFIG=/usr/local/llvm-or1k/bin/llvm-config python3 setup.py install --user + +* Install ARTIQ: :: + + $ cd ~/artiq-dev + $ git clone --recursive https://github.com/m-labs/artiq # if not already done + $ cd artiq + $ python3 setup.py develop --user + +* Build the documentation: :: + + $ cd ~/artiq-dev/artiq/doc/manual + $ make html + +Configuring the core device +--------------------------- + +This should be done after either installation methods (conda or source). + .. _flash-mac-ip-addr: * Set the MAC and IP address in the :ref:`core device configuration flash storage `: @@ -295,29 +321,6 @@ To flash the ``idle`` kernel: .. note:: You can find more information about how to use the ``artiq_coretool`` utility on the :ref:`Utilities ` page. 
-Installing the host-side software -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -* Install the llvmlite Python bindings: :: - - $ cd ~/artiq-dev - $ git clone https://github.com/m-labs/llvmlite - $ git checkout artiq - $ cd llvmlite - $ LLVM_CONFIG=/usr/local/llvm-or1k/bin/llvm-config python3 setup.py install --user - -* Install ARTIQ: :: - - $ cd ~/artiq-dev - $ git clone --recursive https://github.com/m-labs/artiq # if not already done - $ cd artiq - $ python3 setup.py develop --user - -* Build the documentation: :: - - $ cd ~/artiq-dev/artiq/doc/manual - $ make html - Ubuntu 14.04 specific instructions ---------------------------------- From 54a568c5108eec813eb4ff50b32ccf917b29d440 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Fri, 7 Aug 2015 20:49:13 +0800 Subject: [PATCH 045/191] conda/llvmdev-or1k: cleanup, bump version number --- conda/llvmdev-or1k/meta.yaml | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/conda/llvmdev-or1k/meta.yaml b/conda/llvmdev-or1k/meta.yaml index 9406b8943..bb3ad55bf 100644 --- a/conda/llvmdev-or1k/meta.yaml +++ b/conda/llvmdev-or1k/meta.yaml @@ -1,6 +1,6 @@ package: name: llvmdev-or1k - version: "3.4" + version: "3.5.0" source: git_url: https://github.com/openrisc/llvm-or1k @@ -11,16 +11,12 @@ build: requirements: build: - - system [linux and not armv6] + - system [linux] - cmake [linux] run: - - system [linux and not armv6] - -#test: -#commands: -#- clang --help [linux and not armv6] + - system [linux] about: home: http://llvm.org/ - license: Open Source (http://llvm.org/releases/3.4/LICENSE.TXT) + license: Open Source (http://llvm.org/releases/3.5.0/LICENSE.TXT) summary: Development headers and libraries for LLVM From be55487d2da3c0cd6c08a6cbae712965e061c7e8 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Fri, 7 Aug 2015 21:12:00 +0800 Subject: [PATCH 046/191] comm_generic: cleanup --- artiq/coredevice/comm_generic.py | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/artiq/coredevice/comm_generic.py b/artiq/coredevice/comm_generic.py index 88fee184b..4ac2598df 100644 --- a/artiq/coredevice/comm_generic.py +++ b/artiq/coredevice/comm_generic.py @@ -108,10 +108,7 @@ class CommGeneric: if ty != _D2HMsgType.IDENT_REPLY: raise IOError("Incorrect reply from device: {}".format(ty)) (reply, ) = struct.unpack("B", self.read(1)) - runtime_id = chr(reply) - for i in range(3): - (reply, ) = struct.unpack("B", self.read(1)) - runtime_id += chr(reply) + runtime_id = self.read(4).decode() if runtime_id != "AROR": raise UnsupportedDevice("Unsupported runtime ID: {}" .format(runtime_id)) @@ -238,9 +235,11 @@ class CommGeneric: length, ty = self._read_header() if ty != _D2HMsgType.LOG_REPLY: raise IOError("Incorrect request from device: "+str(ty)) - r = "" - for i in range(length - 9): - c = struct.unpack("B", self.read(1))[0] - if c: - r += chr(c) - return r + log = self.read(length - 9).decode() + try: + idx = log.index("\x00") + except ValueError: + pass + else: + log = log[:idx] + return log From 54d85efc2a1fc6f430458e66f3367c3a80e0ffd2 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sat, 8 Aug 2015 11:08:04 +0800 Subject: [PATCH 047/191] master,gui: show Git commit messages in schedule --- artiq/gui/schedule.py | 9 +++++++-- artiq/gui/tools.py | 37 +++++++++++++++++++++++++++---------- artiq/master/repository.py | 10 ++++++---- artiq/master/scheduler.py | 14 ++++++++------ 4 files changed, 48 insertions(+), 22 deletions(-) diff --git a/artiq/gui/schedule.py 
b/artiq/gui/schedule.py index 69324e07a..ab11714c1 100644 --- a/artiq/gui/schedule.py +++ b/artiq/gui/schedule.py @@ -5,7 +5,7 @@ from quamash import QtGui, QtCore from pyqtgraph import dockarea from artiq.protocols.sync_struct import Subscriber -from artiq.gui.tools import DictSyncModel +from artiq.gui.tools import elide, DictSyncModel class _ScheduleModel(DictSyncModel): @@ -37,7 +37,10 @@ class _ScheduleModel(DictSyncModel): elif column == 5: expid = v["expid"] if "repo_rev" in expid: - return expid["repo_rev"] + r = expid["repo_rev"] + if v["repo_msg"]: + r += "\n" + elide(v["repo_msg"], 40) + return r else: return "Outside repo." elif column == 6: @@ -63,6 +66,8 @@ class ScheduleDock(dockarea.Dock): self.table.setSelectionMode(QtGui.QAbstractItemView.SingleSelection) self.table.horizontalHeader().setResizeMode( QtGui.QHeaderView.ResizeToContents) + self.table.verticalHeader().setResizeMode( + QtGui.QHeaderView.ResizeToContents) self.addWidget(self.table) self.table.setContextMenuPolicy(QtCore.Qt.ActionsContextMenu) diff --git a/artiq/gui/tools.py b/artiq/gui/tools.py index ac003cf9c..7d673cc55 100644 --- a/artiq/gui/tools.py +++ b/artiq/gui/tools.py @@ -1,12 +1,24 @@ from quamash import QtCore -def force_spinbox_value(spinbox, value): - if spinbox.minimum() > value: - spinbox.setMinimum(value) - if spinbox.maximum() < value: - spinbox.setMaximum(value) - spinbox.setValue(value) +def elide(s, maxlen): + elided = False + if len(s) > maxlen: + s = s[:maxlen] + elided = True + try: + idx = s.index("\n") + except ValueError: + pass + else: + s = s[:idx] + elided = True + if elided: + maxlen -= 3 + if len(s) > maxlen: + s = s[:maxlen] + s += "..." + return s def short_format(v): @@ -16,10 +28,7 @@ def short_format(v): if t is int or t is float: return str(v) elif t is str: - if len(v) < 15: - return "\"" + v + "\"" - else: - return "\"" + v[:12] + "\"..." 
+ return "\"" + elide(v, 15) + "\"" else: r = t.__name__ if t is list or t is dict or t is set: @@ -27,6 +36,14 @@ def short_format(v): return r +def force_spinbox_value(spinbox, value): + if spinbox.minimum() > value: + spinbox.setMinimum(value) + if spinbox.maximum() < value: + spinbox.setMaximum(value) + spinbox.setValue(value) + + class _SyncSubstruct: def __init__(self, update_cb, ref): self.update_cb = update_cb diff --git a/artiq/master/repository.py b/artiq/master/repository.py index 12e3f8a68..71eff04e1 100644 --- a/artiq/master/repository.py +++ b/artiq/master/repository.py @@ -70,7 +70,7 @@ class Repository: self._scanning = True new_head_rev = self.backend.get_head_rev() - wd = self.backend.request_rev(new_head_rev) + wd, _ = self.backend.request_rev(new_head_rev) self.backend.release_rev(self.head_rev) self.head_rev = new_head_rev new_explist = yield from _scan_experiments(wd, self.log_fn) @@ -90,7 +90,7 @@ class FilesystemBackend: return "N/A" def request_rev(self, rev): - return self.root + return self.root, None def release_rev(self, rev): pass @@ -99,7 +99,9 @@ class FilesystemBackend: class _GitCheckout: def __init__(self, git, rev): self.path = tempfile.mkdtemp() - git.checkout_tree(git.get(rev), directory=self.path) + commit = git.get(rev) + git.checkout_tree(commit, directory=self.path) + self.message = commit.message.strip() self.ref_count = 1 logger.info("checked out revision %s into %s", rev, self.path) @@ -126,7 +128,7 @@ class GitBackend: else: co = _GitCheckout(self.git, rev) self.checkouts[rev] = co - return co.path + return co.path, co.message def release_rev(self, rev): co = self.checkouts[rev] diff --git a/artiq/master/scheduler.py b/artiq/master/scheduler.py index 56751a926..fefa8539f 100644 --- a/artiq/master/scheduler.py +++ b/artiq/master/scheduler.py @@ -48,7 +48,7 @@ def _mk_worker_method(name): class Run: def __init__(self, rid, pipeline_name, wd, expid, priority, due_date, flush, - worker_handlers, notifier): + worker_handlers, notifier, **kwargs): # called through pool self.rid = rid self.pipeline_name = pipeline_name @@ -62,8 +62,7 @@ class Run: self._status = RunStatus.pending - self._notifier = notifier - self._notifier[self.rid] = { + notification = { "pipeline": self.pipeline_name, "expid": self.expid, "priority": self.priority, @@ -71,6 +70,9 @@ class Run: "flush": self.flush, "status": self._status.name } + notification.update(kwargs) + self._notifier = notifier + self._notifier[self.rid] = notification @property def status(self): @@ -142,11 +144,11 @@ class RunPool: if "repo_rev" in expid: if expid["repo_rev"] is None: expid["repo_rev"] = self._repo_backend.get_head_rev() - wd = self._repo_backend.request_rev(expid["repo_rev"]) + wd, repo_msg = self._repo_backend.request_rev(expid["repo_rev"]) else: - wd = None + wd, repo_msg = None, None run = Run(rid, pipeline_name, wd, expid, priority, due_date, flush, - self._worker_handlers, self._notifier) + self._worker_handlers, self._notifier, repo_msg=repo_msg) self.runs[rid] = run if self.submitted_cb is not None: self.submitted_cb() From 585bb236f810cc285074391a27eac3f1fd48933c Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sat, 8 Aug 2015 11:13:36 +0800 Subject: [PATCH 048/191] master: add ddb/pdb args --- artiq/frontend/artiq_master.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/artiq/frontend/artiq_master.py b/artiq/frontend/artiq_master.py index 939b67659..b63c52a2e 100755 --- a/artiq/frontend/artiq_master.py +++ b/artiq/frontend/artiq_master.py @@ 
-26,6 +26,11 @@ def get_argparser(): group.add_argument( "--port-control", default=3251, type=int, help="TCP port to listen to for control (default: %(default)d)") + group = parser.add_argument_group("databases") + group.add_argument("-d", "--ddb", default="ddb.pyon", + help="device database file") + group.add_argument("-p", "--pdb", default="pdb.pyon", + help="parameter database file") group = parser.add_argument_group("repository") group.add_argument( "-g", "--git", default=False, action="store_true", @@ -59,8 +64,8 @@ def main(): loop = asyncio.get_event_loop() atexit.register(lambda: loop.close()) - ddb = FlatFileDB("ddb.pyon") - pdb = FlatFileDB("pdb.pyon") + ddb = FlatFileDB(args.ddb) + pdb = FlatFileDB(args.pdb) rtr = Notifier(dict()) log = Log(1000) From 42010fcd4bbf38be5b87d36f11ba612e1eaede10 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sat, 8 Aug 2015 11:38:07 +0800 Subject: [PATCH 049/191] gui: regenerate argument setter when modified from repository --- artiq/gui/explorer.py | 49 ++++++++++++++++++++++++++----------------- 1 file changed, 30 insertions(+), 19 deletions(-) diff --git a/artiq/gui/explorer.py b/artiq/gui/explorer.py index 4412320a6..e61a451f0 100644 --- a/artiq/gui/explorer.py +++ b/artiq/gui/explorer.py @@ -12,7 +12,8 @@ from artiq.gui.scan import ScanController class _ExplistModel(DictSyncModel): - def __init__(self, parent, init): + def __init__(self, explorer, parent, init): + self.explorer = explorer DictSyncModel.__init__(self, ["Experiment"], parent, init) @@ -23,6 +24,11 @@ class _ExplistModel(DictSyncModel): def convert(self, k, v, column): return k + def __setitem__(self, k, v): + DictSyncModel.__setitem__(self, k, v) + if k == self.explorer.selected_key: + self.explorer.update_selection(k, k) + class _FreeValueEntry(QtGui.QLineEdit): def __init__(self, procdesc): @@ -166,7 +172,8 @@ class ExplorerDock(dockarea.Dock): self.splitter.addWidget(grid) self.el = QtGui.QListView() - self.el.selectionChanged = self.update_argsetter + self.el.selectionChanged = self._selection_changed + self.selected_key = None grid.addWidget(self.el, 0, 0, colspan=4) self.datetime = QtGui.QDateTimeEdit() @@ -200,28 +207,35 @@ class ExplorerDock(dockarea.Dock): self.splitter.setSizes([grid.minimumSizeHint().width(), 1000]) self.state = dict() - def update_argsetter(self, selected, deselected): - deselected = deselected.indexes() + def update_selection(self, selected, deselected): if deselected: - row = deselected[0].row() - key = self.explist_model.row_to_key[row] - self.state[key] = self.argsetter.get_argument_values(False) + self.state[deselected] = self.argsetter.get_argument_values(False) - selected = selected.indexes() if selected: - row = selected[0].row() - key = self.explist_model.row_to_key[row] - expinfo = self.explist_model.backing_store[key] + expinfo = self.explist_model.backing_store[selected] arguments = expinfo["arguments"] sizes = self.splitter.sizes() self.argsetter.deleteLater() self.argsetter = _ArgumentSetter(self.dialog_parent, arguments) - if key in self.state: - arguments = self.state[key] + if selected in self.state: + arguments = self.state[selected] if arguments is not None: self.argsetter.set_argument_values(arguments, True) self.splitter.insertWidget(1, self.argsetter) self.splitter.setSizes(sizes) + self.selected_key = selected + + def _sel_to_key(self, selection): + selection = selection.indexes() + if selection: + row = selection[0].row() + return self.explist_model.row_to_key[row] + else: + return None + + def 
_selection_changed(self, selected, deselected): + self.update_selection(self._sel_to_key(selected), + self._sel_to_key(deselected)) def save_state(self): idx = self.el.selectedIndexes() @@ -248,7 +262,7 @@ class ExplorerDock(dockarea.Dock): yield from self.explist_subscriber.close() def init_explist_model(self, init): - self.explist_model = _ExplistModel(self.el, init) + self.explist_model = _ExplistModel(self, self.el, init) self.el.setModel(self.explist_model) return self.explist_model @@ -266,11 +280,8 @@ class ExplorerDock(dockarea.Dock): self.status_bar.showMessage("Submitted RID {}".format(rid)) def submit_clicked(self): - idx = self.el.selectedIndexes() - if idx: - row = idx[0].row() - key = self.explist_model.row_to_key[row] - expinfo = self.explist_model.backing_store[key] + if self.selected_key is not None: + expinfo = self.explist_model.backing_store[self.selected_key] if self.datetime_en.isChecked(): due_date = self.datetime.dateTime().toMSecsSinceEpoch()/1000 else: From c80f0fa07a4032a4798684de8762f208ed6642ee Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sat, 8 Aug 2015 11:44:19 +0800 Subject: [PATCH 050/191] master: delete head repository checkout on exit --- artiq/frontend/artiq_master.py | 1 + artiq/master/repository.py | 4 ++++ 2 files changed, 5 insertions(+) diff --git a/artiq/frontend/artiq_master.py b/artiq/frontend/artiq_master.py index b63c52a2e..2d6fffd18 100755 --- a/artiq/frontend/artiq_master.py +++ b/artiq/frontend/artiq_master.py @@ -74,6 +74,7 @@ def main(): else: repo_backend = FilesystemBackend(args.repository) repository = Repository(repo_backend, log.log) + atexit.register(repository.close) repository.scan_async() worker_handlers = { diff --git a/artiq/master/repository.py b/artiq/master/repository.py index 71eff04e1..73ff9cde0 100644 --- a/artiq/master/repository.py +++ b/artiq/master/repository.py @@ -63,6 +63,10 @@ class Repository: self._scanning = False + def close(self): + # The object cannot be used anymore after calling this method. 
+ self.backend.release_rev(self.head_rev) + @asyncio.coroutine def scan(self): if self._scanning: From 1818e8173b9aa5b52b11f80cb9d4787152836a34 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sat, 8 Aug 2015 16:25:55 +0800 Subject: [PATCH 051/191] client: log display support --- artiq/frontend/artiq_client.py | 34 ++++++++++++++++++++++++++++++++-- 1 file changed, 32 insertions(+), 2 deletions(-) diff --git a/artiq/frontend/artiq_client.py b/artiq/frontend/artiq_client.py index 5650e5f09..49fa91651 100755 --- a/artiq/frontend/artiq_client.py +++ b/artiq/frontend/artiq_client.py @@ -82,10 +82,10 @@ def get_argparser(): parser_del_parameter.add_argument("name", help="name of the parameter") parser_show = subparsers.add_parser( - "show", help="show schedule, devices or parameters") + "show", help="show schedule, log, devices or parameters") parser_show.add_argument( "what", - help="select object to show: schedule/devices/parameters") + help="select object to show: schedule/log/devices/parameters") subparsers.add_parser("scan-repository", help="trigger a repository rescan") @@ -224,12 +224,42 @@ def _show_dict(args, notifier_name, display_fun): _run_subscriber(args.server, args.port, subscriber) +class _LogPrinter: + def __init__(self, init): + for rid, msg in init: + print(rid, msg) + + def append(self, x): + rid, msg = x + print(rid, msg) + + def insert(self, i, x): + rid, msg = x + print(rid, msg) + + def pop(self, i=-1): + pass + + def __delitem__(self, x): + pass + + def __setitem__(self, k, v): + pass + + +def _show_log(args): + subscriber = Subscriber("log", _LogPrinter) + _run_subscriber(args.server, args.port, subscriber) + + def main(): args = get_argparser().parse_args() action = args.action.replace("-", "_") if action == "show": if args.what == "schedule": _show_dict(args, "schedule", _show_schedule) + elif args.what == "log": + _show_log(args) elif args.what == "devices": _show_dict(args, "devices", _show_devices) elif args.what == "parameters": From c0030406c36921b1d5d6cc03bfbdfbe6c3ce6c37 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sat, 8 Aug 2015 21:24:13 +0800 Subject: [PATCH 052/191] doc: core device page --- .../{fpga_board_ports.rst => core_device.rst} | 21 ++++++++++++++++++- doc/manual/core_device_flash_storage.rst | 14 ------------- doc/manual/index.rst | 3 +-- 3 files changed, 21 insertions(+), 17 deletions(-) rename doc/manual/{fpga_board_ports.rst => core_device.rst} (75%) delete mode 100644 doc/manual/core_device_flash_storage.rst diff --git a/doc/manual/fpga_board_ports.rst b/doc/manual/core_device.rst similarity index 75% rename from doc/manual/fpga_board_ports.rst rename to doc/manual/core_device.rst index 52bd8dffe..e4334db48 100644 --- a/doc/manual/fpga_board_ports.rst +++ b/doc/manual/core_device.rst @@ -1,5 +1,24 @@ +Core device +=========== + +.. _core-device-flash-storage: + +Flash storage +************* + +The core device contains some flash space that can be used to store +some configuration data. + +This storage area is used to store the core device MAC address, IP address and even the idle kernel. + +The flash storage area is one sector (typically 64 kB) large and is organized as a list +of key-value records. + +This flash storage space can be accessed by using the artiq_coretool :ref:`core-device-access-tool`. 
+ + FPGA board ports -================ +**************** KC705 ----- diff --git a/doc/manual/core_device_flash_storage.rst b/doc/manual/core_device_flash_storage.rst deleted file mode 100644 index cc5fe1b2d..000000000 --- a/doc/manual/core_device_flash_storage.rst +++ /dev/null @@ -1,14 +0,0 @@ -.. _core-device-flash-storage: - -Core device flash storage -========================= - -The core device contains some flash space that can be used to store -some configuration data. - -This storage area is used to store the core device MAC address, IP address and even the idle kernel. - -The flash storage area is one sector (64 kB) large and is organized as a list -of key-value records. - -This flash storage space can be accessed by using the artiq_coretool.py :ref:`core-device-access-tool`. diff --git a/doc/manual/index.rst b/doc/manual/index.rst index 502ea2443..bcf584cf9 100644 --- a/doc/manual/index.rst +++ b/doc/manual/index.rst @@ -11,12 +11,11 @@ Contents: getting_started developing_a_ndsp management_system + core_device core_language_reference core_drivers_reference protocols_reference ndsp_reference - core_device_flash_storage utilities - fpga_board_ports default_network_ports faq From 0a6fcd9fa08c931a0f09cb1704b44a00d8e15c9c Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sat, 8 Aug 2015 21:24:23 +0800 Subject: [PATCH 053/191] Revert "comm_generic: cleanup" This reverts commit be55487d2da3c0cd6c08a6cbae712965e061c7e8. --- artiq/coredevice/comm_generic.py | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/artiq/coredevice/comm_generic.py b/artiq/coredevice/comm_generic.py index 4ac2598df..88fee184b 100644 --- a/artiq/coredevice/comm_generic.py +++ b/artiq/coredevice/comm_generic.py @@ -108,7 +108,10 @@ class CommGeneric: if ty != _D2HMsgType.IDENT_REPLY: raise IOError("Incorrect reply from device: {}".format(ty)) (reply, ) = struct.unpack("B", self.read(1)) - runtime_id = self.read(4).decode() + runtime_id = chr(reply) + for i in range(3): + (reply, ) = struct.unpack("B", self.read(1)) + runtime_id += chr(reply) if runtime_id != "AROR": raise UnsupportedDevice("Unsupported runtime ID: {}" .format(runtime_id)) @@ -235,11 +238,9 @@ class CommGeneric: length, ty = self._read_header() if ty != _D2HMsgType.LOG_REPLY: raise IOError("Incorrect request from device: "+str(ty)) - log = self.read(length - 9).decode() - try: - idx = log.index("\x00") - except ValueError: - pass - else: - log = log[:idx] - return log + r = "" + for i in range(length - 9): + c = struct.unpack("B", self.read(1))[0] + if c: + r += chr(c) + return r From e8aa825a9da8bcac35129d101b037b19effa7ec5 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sat, 8 Aug 2015 22:27:57 +0800 Subject: [PATCH 054/191] doc: git integration --- doc/manual/index.rst | 2 +- doc/manual/management_system.rst | 71 +++++++++++++++++++++++++------- 2 files changed, 58 insertions(+), 15 deletions(-) diff --git a/doc/manual/index.rst b/doc/manual/index.rst index bcf584cf9..1f6915468 100644 --- a/doc/manual/index.rst +++ b/doc/manual/index.rst @@ -9,13 +9,13 @@ Contents: introduction installing getting_started - developing_a_ndsp management_system core_device core_language_reference core_drivers_reference protocols_reference ndsp_reference + developing_a_ndsp utilities default_network_ports faq diff --git a/doc/manual/management_system.rst b/doc/manual/management_system.rst index ca6c35a98..b5686f04b 100644 --- a/doc/manual/management_system.rst +++ b/doc/manual/management_system.rst @@ -1,6 
+1,11 @@ Management system ================= +The management system described below is optional: experiments can be run one by one using ``artiq_run``, and the controllers can run stand-alone (without a controller manager). For their very first steps with ARTIQ or in simple or particular cases, users do not need to deploy the management system. + +Components +********** + Master ------ @@ -8,10 +13,6 @@ The master is responsible for managing the parameter and device databases, the e The master is a headless component, and one or several clients (command-line or GUI) use the network to interact with it. -.. argparse:: - :ref: artiq.frontend.artiq_master.get_argparser - :prog: artiq_master - Controller manager ------------------ @@ -21,24 +22,66 @@ A controller manager connects to the master and uses the device database to dete Controller managers use the local network address of the connection to the master to filter the device database and run only those controllers that are allocated to the current node. Hostname resolution is supported. -.. argparse:: - :ref: artiq.frontend.artiq_ctlmgr.get_argparser - :prog: artiq_ctlmgr - Command-line client ------------------- -The command-line client connects to the master and permits modification and monitoring of the databases, monitoring the experiment schedule, and submitting experiments. +The command-line client connects to the master and permits modification and monitoring of the databases, monitoring the experiment schedule and log, and submitting experiments. + +GUI client +---------- + +The GUI client connects to the master and is the main way of interacting with it. The main features of the GUI are scheduling of experiments, setting of their arguments, examining the schedule, displaying real-time results, and debugging TTL and DDS channels in real time. + +Experiment scheduling +********************* + +Git integration +*************** + +The master may use a Git repository for the storage of experiment source code. Using Git has many advantages. For example, each result file (HDF5) contains the commit ID corresponding to the exact source code that produced it, which helps reproducibility. + +Even though the master also supports non-bare repositories, it is recommended to use a bare repository so that it can easily support push transactions from clients. Create it with e.g.: :: + + mkdir experiments + cd experiments + git init --bare + +You want Git to notify the master every time the repository is pushed to (updated), so that it is rescanned for experiments and e.g. the GUI controls and the experiment list are updated. + +Create a file named ``post-receive`` in the ``hooks`` folder (this folder has been created by the ``git`` command), containing the following: :: + + #!/bin/sh + artiq_client scan-repository + +Then set the execution permission on it: :: + + chmod 755 hooks/post-receive + +You may now run the master with the Git support enabled: :: + + artiq_master -g -r /path_to/experiments + +Push commits containing experiments to the bare repository using e.g. Git over SSH, and the new experiments should automatically appear in the GUI. + +.. note:: If you plan to run the ARTIQ system entirely on a single machine, you may also consider using a non-bare repository and the ``post-commit`` hook to trigger repository scans every time you commit changes (locally). + +The GUI always runs experiments from the repository. 
The command-line client, by default, runs experiment from the raw filesystem (which is useful for iterating rapidly without creating many disorganized commits). If you want to use the repository instead, simply pass the ``-R`` option. + +Reference +********* + +.. argparse:: + :ref: artiq.frontend.artiq_master.get_argparser + :prog: artiq_master + +.. argparse:: + :ref: artiq.frontend.artiq_ctlmgr.get_argparser + :prog: artiq_ctlmgr .. argparse:: :ref: artiq.frontend.artiq_client.get_argparser :prog: artiq_client -GUI client ----------- - -The GUI client connects to the master and is the main way of interacting with it. - .. argparse:: :ref: artiq.frontend.artiq_gui.get_argparser :prog: artiq_gui From 16af80ca3e67c827e56e5928d5c1466be999ea35 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sat, 8 Aug 2015 22:59:53 +0800 Subject: [PATCH 055/191] doc: add core device explanation --- doc/manual/core_device.rst | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/doc/manual/core_device.rst b/doc/manual/core_device.rst index e4334db48..fa8c8db99 100644 --- a/doc/manual/core_device.rst +++ b/doc/manual/core_device.rst @@ -1,20 +1,23 @@ Core device =========== +The core device is a FPGA-based hardware component that contains a softcore CPU tightly coupled with the so-called RTIO core that provides precision timing. The CPU executes Python code that is statically compiled by the ARTIQ compiler, and communicates with the core device peripherals (TTL, DDS, etc.) over the RTIO core. This architecture provides high timing resolution, low latency, low jitter, high level programming capabilities, and good integration with the rest of the Python experiment code. + +While it is possible to use all the other parts of ARTIQ (controllers, master, GUI, result management, etc.) without a core device, many experiments require it. + + .. _core-device-flash-storage: Flash storage ************* -The core device contains some flash space that can be used to store -some configuration data. +The core device contains some flash space that can be used to store configuration data. This storage area is used to store the core device MAC address, IP address and even the idle kernel. -The flash storage area is one sector (typically 64 kB) large and is organized as a list -of key-value records. +The flash storage area is one sector (typically 64 kB) large and is organized as a list of key-value records. -This flash storage space can be accessed by using the artiq_coretool :ref:`core-device-access-tool`. +This flash storage space can be accessed by using ``artiq_coretool`` (see: :ref:`core-device-access-tool`). FPGA board ports From 671a3f1833688da5a8fe4bb283fd6e1d1468fa55 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sat, 8 Aug 2015 23:05:00 +0800 Subject: [PATCH 056/191] doc: precision about dds batch time --- artiq/coredevice/dds.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/artiq/coredevice/dds.py b/artiq/coredevice/dds.py index 9ba4584d8..037df881c 100644 --- a/artiq/coredevice/dds.py +++ b/artiq/coredevice/dds.py @@ -33,13 +33,16 @@ class DDSBus: @kernel def batch_enter(self): """Starts a DDS command batch. All DDS commands are buffered - after this call, until ``batch_exit`` is called.""" + after this call, until ``batch_exit`` is called. 
+ + The time of execution of the DDS commands is the time of entering the + batch (as closely as hardware permits).""" syscall("dds_batch_enter", now_mu()) @kernel def batch_exit(self): """Ends a DDS command batch. All buffered DDS commands are issued - on the bus, and FUD is pulsed at the time the batch started.""" + on the bus.""" syscall("dds_batch_exit") From 400b4148859058cae7a898500b39a13372b487f2 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sat, 8 Aug 2015 23:23:25 +0800 Subject: [PATCH 057/191] master,client: support scanning specific repo revisions --- artiq/frontend/artiq_client.py | 9 ++++++--- artiq/master/repository.py | 21 +++++++++++---------- 2 files changed, 17 insertions(+), 13 deletions(-) diff --git a/artiq/frontend/artiq_client.py b/artiq/frontend/artiq_client.py index 49fa91651..216c09809 100755 --- a/artiq/frontend/artiq_client.py +++ b/artiq/frontend/artiq_client.py @@ -87,8 +87,11 @@ def get_argparser(): "what", help="select object to show: schedule/log/devices/parameters") - subparsers.add_parser("scan-repository", - help="trigger a repository rescan") + parser_scan = subparsers.add_parser("scan-repository", + help="trigger a repository (re)scan") + parser_scan.add_argument("revision", default=None, nargs="?", + help="use a specific repository revision " + "(defaults to head)") return parser @@ -145,7 +148,7 @@ def _action_del_parameter(remote, args): def _action_scan_repository(remote, args): - remote.scan_async() + remote.scan_async(args.revision) def _show_schedule(schedule): diff --git a/artiq/master/repository.py b/artiq/master/repository.py index 73ff9cde0..fdb131c9d 100644 --- a/artiq/master/repository.py +++ b/artiq/master/repository.py @@ -57,33 +57,34 @@ class Repository: self.backend = backend self.log_fn = log_fn - self.head_rev = self.backend.get_head_rev() - self.backend.request_rev(self.head_rev) + self.cur_rev = self.backend.get_head_rev() + self.backend.request_rev(self.cur_rev) self.explist = Notifier(dict()) self._scanning = False def close(self): # The object cannot be used anymore after calling this method. 
- self.backend.release_rev(self.head_rev) + self.backend.release_rev(self.cur_rev) @asyncio.coroutine - def scan(self): + def scan(self, new_cur_rev=None): if self._scanning: return self._scanning = True - new_head_rev = self.backend.get_head_rev() - wd, _ = self.backend.request_rev(new_head_rev) - self.backend.release_rev(self.head_rev) - self.head_rev = new_head_rev + if new_cur_rev is None: + new_cur_rev = self.backend.get_head_rev() + wd, _ = self.backend.request_rev(new_cur_rev) + self.backend.release_rev(self.cur_rev) + self.cur_rev = new_cur_rev new_explist = yield from _scan_experiments(wd, self.log_fn) _sync_explist(self.explist, new_explist) self._scanning = False - def scan_async(self): - asyncio.async(self.scan()) + def scan_async(self, new_cur_rev=None): + asyncio.async(self.scan(new_cur_rev)) class FilesystemBackend: From f81b2eba4378d23ca0b1090a574176ebc8e0b97a Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sat, 8 Aug 2015 23:36:12 +0800 Subject: [PATCH 058/191] master: better repo scan error handling --- artiq/master/repository.py | 22 ++++++++++++---------- artiq/tools.py | 12 ++++++++++++ 2 files changed, 24 insertions(+), 10 deletions(-) diff --git a/artiq/master/repository.py b/artiq/master/repository.py index fdb131c9d..556232014 100644 --- a/artiq/master/repository.py +++ b/artiq/master/repository.py @@ -6,6 +6,7 @@ import logging from artiq.protocols.sync_struct import Notifier from artiq.master.worker import Worker +from artiq.tools import exc_to_warning logger = logging.getLogger(__name__) @@ -72,19 +73,20 @@ class Repository: if self._scanning: return self._scanning = True + try: + if new_cur_rev is None: + new_cur_rev = self.backend.get_head_rev() + wd, _ = self.backend.request_rev(new_cur_rev) + self.backend.release_rev(self.cur_rev) + self.cur_rev = new_cur_rev + new_explist = yield from _scan_experiments(wd, self.log_fn) - if new_cur_rev is None: - new_cur_rev = self.backend.get_head_rev() - wd, _ = self.backend.request_rev(new_cur_rev) - self.backend.release_rev(self.cur_rev) - self.cur_rev = new_cur_rev - new_explist = yield from _scan_experiments(wd, self.log_fn) - - _sync_explist(self.explist, new_explist) - self._scanning = False + _sync_explist(self.explist, new_explist) + finally: + self._scanning = False def scan_async(self, new_cur_rev=None): - asyncio.async(self.scan(new_cur_rev)) + asyncio.async(exc_to_warning(self.scan(new_cur_rev))) class FilesystemBackend: diff --git a/artiq/tools.py b/artiq/tools.py index c6db084a9..de98adecd 100644 --- a/artiq/tools.py +++ b/artiq/tools.py @@ -11,6 +11,9 @@ from artiq.language.environment import is_experiment from artiq.protocols import pyon +logger = logging.getLogger(__name__) + + def parse_arguments(arguments): d = {} for argument in arguments: @@ -75,6 +78,15 @@ def init_logger(args): logging.basicConfig(level=logging.WARNING + args.quiet*10 - args.verbose*10) +@asyncio.coroutine +def exc_to_warning(coro): + try: + yield from coro + except: + logger.warning("asyncio coroutine terminated with exception", + exc_info=True) + + @asyncio.coroutine def asyncio_process_wait_timeout(process, timeout): # In Python < 3.5, asyncio.wait_for(process.wait(), ... 
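
The ``exc_to_warning`` helper added above wraps a fire-and-forget coroutine so that a failed background repository scan is logged as a warning instead of being silently dropped by the event loop. A minimal, self-contained sketch of the same pattern, written with modern ``async``/``await`` syntax instead of the Python 3.4 ``yield from`` style used in the patch (``failing_scan`` is a hypothetical stand-in for ``Repository.scan``): ::

    import asyncio
    import logging

    logger = logging.getLogger(__name__)


    async def exc_to_warning(coro):
        # Run the wrapped coroutine and turn any exception into a logged
        # warning instead of an unretrieved task exception.
        try:
            await coro
        except:
            logger.warning("asyncio coroutine terminated with exception",
                           exc_info=True)


    async def failing_scan():
        # Hypothetical stand-in for Repository.scan(): simulate a failure.
        raise OSError("cannot check out requested revision")


    if __name__ == "__main__":
        logging.basicConfig(level=logging.INFO)
        # Fire-and-forget semantics as in Repository.scan_async(): the
        # failure only shows up in the log, it does not propagate.
        asyncio.run(exc_to_warning(failing_scan()))
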
From 799a58a44296ca2577a1d8152efdb4d8e1dba67b Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sun, 9 Aug 2015 13:53:06 +0800 Subject: [PATCH 059/191] doc: minor cleanup --- README.rst | 4 +--- doc/manual/introduction.rst | 2 +- doc/manual/management_system.rst | 2 +- 3 files changed, 3 insertions(+), 5 deletions(-) diff --git a/README.rst b/README.rst index efa1eeeee..7287516c8 100644 --- a/README.rst +++ b/README.rst @@ -1,12 +1,10 @@ .. image:: doc/logo/artiq.png .. image:: https://travis-ci.org/m-labs/artiq.svg :target: https://travis-ci.org/m-labs/artiq -.. image:: https://coveralls.io/repos/m-labs/artiq/badge.svg?branch=master - :target: https://coveralls.io/r/m-labs/artiq?branch=master ARTIQ (Advanced Real-Time Infrastructure for Quantum physics) is a next-generation control system for quantum information experiments. It is -being developed in partnership with the Ion Storage Group at NIST, and its +developed in partnership with the Ion Storage Group at NIST, and its applicability reaches beyond ion trapping. The system features a high-level programming language that helps describing diff --git a/doc/manual/introduction.rst b/doc/manual/introduction.rst index 0b224f38c..a538c3980 100644 --- a/doc/manual/introduction.rst +++ b/doc/manual/introduction.rst @@ -7,7 +7,7 @@ Introduction ARTIQ (Advanced Real-Time Infrastructure for Quantum physics) is a next-generation control system for quantum information experiments. It is -being developed in partnership with the Ion Storage Group at NIST, and its +developed in partnership with the Ion Storage Group at NIST, and its applicability reaches beyond ion trapping. The system features a high-level programming language that helps describing diff --git a/doc/manual/management_system.rst b/doc/manual/management_system.rst index b5686f04b..353e77575 100644 --- a/doc/manual/management_system.rst +++ b/doc/manual/management_system.rst @@ -63,7 +63,7 @@ You may now run the master with the Git support enabled: :: Push commits containing experiments to the bare repository using e.g. Git over SSH, and the new experiments should automatically appear in the GUI. -.. note:: If you plan to run the ARTIQ system entirely on a single machine, you may also consider using a non-bare repository and the ``post-commit`` hook to trigger repository scans every time you commit changes (locally). +.. note:: If you plan to run the ARTIQ system entirely on a single machine, you may also consider using a non-bare repository and the ``post-commit`` hook to trigger repository scans every time you commit changes (locally). The ARTIQ master never uses the repository's working directory, but only what is committed. The GUI always runs experiments from the repository. The command-line client, by default, runs experiment from the raw filesystem (which is useful for iterating rapidly without creating many disorganized commits). If you want to use the repository instead, simply pass the ``-R`` option. 
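
The worker changes earlier in this series record the repository revision of each run into its HDF5 result file, which is what ties a result back to the exact committed code. A minimal sketch for reading that value back with h5py (``results.h5`` is a placeholder path; the dataset name matches the one written in ``worker_impl.py``): ::

    import h5py

    with h5py.File("results.h5", "r") as f:
        if "repo_rev" in f:
            # Stored as a fixed-length byte string, so decode it.
            repo_rev = f["repo_rev"][()].decode()
            print("produced by repository revision", repo_rev)
        else:
            print("run was submitted from outside the repository")
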
From 13c15173cc334770f04ddca1488296ca8f6d3214 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sun, 9 Aug 2015 17:12:06 +0800 Subject: [PATCH 060/191] examples/ddb: add --simulation for controllers --- examples/master/ddb.pyon | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/examples/master/ddb.pyon b/examples/master/ddb.pyon index 3098391bd..df172f272 100644 --- a/examples/master/ddb.pyon +++ b/examples/master/ddb.pyon @@ -87,7 +87,7 @@ "host": "::1", "port": 4000, "target_name": "pdq2", - "command": "pdq2_controller -p {port} --bind {bind} --dump qc_q1_0.bin" + "command": "pdq2_controller -p {port} --bind {bind} --simulation --dump qc_q1_0.bin" }, "qc_q1_1": { "type": "controller", @@ -95,7 +95,7 @@ "host": "::1", "port": 4001, "target_name": "pdq2", - "command": "pdq2_controller -p {port} --bind {bind} --dump qc_q1_1.bin" + "command": "pdq2_controller -p {port} --bind {bind} --simulation --dump qc_q1_1.bin" }, "qc_q1_2": { "type": "controller", @@ -103,7 +103,7 @@ "host": "::1", "port": 4002, "target_name": "pdq2", - "command": "pdq2_controller -p {port} --bind {bind} --dump qc_q1_2.bin" + "command": "pdq2_controller -p {port} --bind {bind} --simulation --dump qc_q1_2.bin" }, "qc_q1_3": { "type": "controller", @@ -111,7 +111,7 @@ "host": "::1", "port": 4003, "target_name": "pdq2", - "command": "pdq2_controller -p {port} --bind {bind} --dump qc_q1_3.bin" + "command": "pdq2_controller -p {port} --bind {bind} --simulation --dump qc_q1_3.bin" }, "electrodes": { "type": "local", @@ -130,7 +130,7 @@ "host": "::1", "port": 3253, "target_name": "lda", - "command": "lda_controller -p {port} --bind {bind}" + "command": "lda_controller -p {port} --bind {bind} --simulation" }, "ttl_inout": "pmt0", From 479175870faf042f44c90056b1af40c29d0a3284 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sun, 9 Aug 2015 17:30:01 +0800 Subject: [PATCH 061/191] pdq2/driver: implement ping --- artiq/devices/pdq2/driver.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/artiq/devices/pdq2/driver.py b/artiq/devices/pdq2/driver.py index 79f4f9667..b5ecafdf4 100644 --- a/artiq/devices/pdq2/driver.py +++ b/artiq/devices/pdq2/driver.py @@ -213,3 +213,6 @@ class Pdq2: for frame_data in program: self.program_frame(frame_data) self.write_all() + + def ping(self): + return True From 3f68d0ba8fbcaf0a360a9d68fe2070fe17144a17 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sun, 9 Aug 2015 17:30:46 +0800 Subject: [PATCH 062/191] ctlmgr: ping controllers --- artiq/frontend/artiq_ctlmgr.py | 117 +++++++++++++++++++++++---------- 1 file changed, 82 insertions(+), 35 deletions(-) diff --git a/artiq/frontend/artiq_ctlmgr.py b/artiq/frontend/artiq_ctlmgr.py index 45ab109c4..d59476855 100755 --- a/artiq/frontend/artiq_ctlmgr.py +++ b/artiq/frontend/artiq_ctlmgr.py @@ -9,6 +9,7 @@ import shlex import socket from artiq.protocols.sync_struct import Subscriber +from artiq.protocols.pc_rpc import AsyncioClient from artiq.tools import verbosity_args, init_logger from artiq.tools import asyncio_process_wait_timeout @@ -35,8 +36,18 @@ def get_argparser(): class Controller: - def __init__(self, name, command, retry): - self.launch_task = asyncio.Task(self.launcher(name, command, retry)) + def __init__(self, name, ddb_entry): + self.name = name + self.command = ddb_entry["command"] + self.retry_timer = ddb_entry.get("retry_timer", 5) + + self.host = ddb_entry["host"] + self.port = ddb_entry["port"] + self.ping_timer = ddb_entry.get("ping_timer", 30) + self.ping_timeout = 
ddb_entry.get("ping_timeout", 30) + + self.process = None + self.launch_task = asyncio.Task(self.launcher()) @asyncio.coroutine def end(self): @@ -44,33 +55,70 @@ class Controller: yield from asyncio.wait_for(self.launch_task, None) @asyncio.coroutine - def launcher(self, name, command, retry): - process = None + def _ping_notimeout(self): + remote = AsyncioClient() + yield from remote.connect_rpc(self.host, self.port, None) + try: + targets, _ = remote.get_rpc_id() + remote.select_rpc_target(targets[0]) + ok = yield from remote.ping() + finally: + remote.close_rpc() + return ok + + @asyncio.coroutine + def _ping(self): + try: + return (yield from asyncio.wait_for( + self._ping_notimeout(), self.ping_timeout)) + except: + return False + + @asyncio.coroutine + def _wait_and_ping(self): + while True: + try: + yield from asyncio_process_wait_timeout(self.process, + self.ping_timer) + except asyncio.TimeoutError: + logger.debug("pinging controller %s", self.name) + ok = yield from self._ping() + if not ok: + logger.warning("Controller %s ping failed", self.name) + yield from self._terminate() + return + + @asyncio.coroutine + def launcher(self): try: while True: logger.info("Starting controller %s with command: %s", - name, command) + self.name, self.command) try: - process = yield from asyncio.create_subprocess_exec( - *shlex.split(command)) - yield from asyncio.shield(process.wait()) + self.process = yield from asyncio.create_subprocess_exec( + *shlex.split(self.command)) + yield from self._wait_and_ping() except FileNotFoundError: - logger.warning("Controller %s failed to start", name) + logger.warning("Controller %s failed to start", self.name) else: - logger.warning("Controller %s exited", name) - logger.warning("Restarting in %.1f seconds", retry) - yield from asyncio.sleep(retry) + logger.warning("Controller %s exited", self.name) + logger.warning("Restarting in %.1f seconds", self.retry_timer) + yield from asyncio.sleep(self.retry_timer) except asyncio.CancelledError: - logger.info("Terminating controller %s", name) - if process is not None and process.returncode is None: - process.send_signal(signal.SIGTERM) - logger.debug("Signal sent") - try: - yield from asyncio_process_wait_timeout(process, 5.0) - except asyncio.TimeoutError: - logger.warning("Controller %s did not respond to SIGTERM", - name) - process.send_signal(signal.SIGKILL) + yield from self._terminate() + + @asyncio.coroutine + def _terminate(self): + logger.info("Terminating controller %s", self.name) + if self.process is not None and self.process.returncode is None: + self.process.send_signal(signal.SIGTERM) + logger.debug("Signal sent") + try: + yield from asyncio_process_wait_timeout(self.process, 5.0) + except asyncio.TimeoutError: + logger.warning("Controller %s did not respond to SIGTERM", + self.name) + self.process.send_signal(signal.SIGKILL) def get_ip_addresses(host): @@ -82,8 +130,7 @@ def get_ip_addresses(host): class Controllers: - def __init__(self, retry_command): - self.retry_command = retry_command + def __init__(self): self.host_filter = None self.active_or_queued = set() self.queue = asyncio.Queue() @@ -95,10 +142,10 @@ class Controllers: while True: action, param = yield from self.queue.get() if action == "set": - k, command = param + k, ddb_entry = param if k in self.active: yield from self.active[k].end() - self.active[k] = Controller(k, command, self.retry_command) + self.active[k] = Controller(k, ddb_entry) elif action == "del": yield from self.active[param].end() del self.active[param] @@ -108,10 
+155,10 @@ class Controllers: def __setitem__(self, k, v): if (isinstance(v, dict) and v["type"] == "controller" and self.host_filter in get_ip_addresses(v["host"])): - command = v["command"].format(name=k, - bind=self.host_filter, - port=v["port"]) - self.queue.put_nowait(("set", (k, command))) + v["command"] = v["command"].format(name=k, + bind=self.host_filter, + port=v["port"]) + self.queue.put_nowait(("set", (k, v))) self.active_or_queued.add(k) def __delitem__(self, k): @@ -131,8 +178,8 @@ class Controllers: class ControllerDB: - def __init__(self, retry_command): - self.current_controllers = Controllers(retry_command) + def __init__(self): + self.current_controllers = Controllers() def set_host_filter(self, host_filter): self.current_controllers.host_filter = host_filter @@ -146,8 +193,8 @@ class ControllerDB: @asyncio.coroutine -def ctlmgr(server, port, retry_master, retry_command): - controller_db = ControllerDB(retry_command) +def ctlmgr(server, port, retry_master): + controller_db = ControllerDB() try: subscriber = Subscriber("devices", controller_db.sync_struct_init) while True: @@ -187,7 +234,7 @@ def main(): try: task = asyncio.Task(ctlmgr( - args.server, args.port, args.retry_master, args.retry_command)) + args.server, args.port, args.retry_master)) try: loop.run_forever() finally: From a21049d7795c12fe92e44828d0d3a97eb0fcefde Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sun, 9 Aug 2015 18:28:56 +0800 Subject: [PATCH 063/191] ctlmgr: exponential backoff --- artiq/frontend/artiq_ctlmgr.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/artiq/frontend/artiq_ctlmgr.py b/artiq/frontend/artiq_ctlmgr.py index d59476855..7b23ce747 100755 --- a/artiq/frontend/artiq_ctlmgr.py +++ b/artiq/frontend/artiq_ctlmgr.py @@ -40,12 +40,14 @@ class Controller: self.name = name self.command = ddb_entry["command"] self.retry_timer = ddb_entry.get("retry_timer", 5) + self.retry_timer_backoff = ddb_entry.get("retry_timer_backoff", 1.1) self.host = ddb_entry["host"] self.port = ddb_entry["port"] self.ping_timer = ddb_entry.get("ping_timer", 30) self.ping_timeout = ddb_entry.get("ping_timeout", 30) + self.retry_timer_cur = self.retry_timer self.process = None self.launch_task = asyncio.Task(self.launcher()) @@ -69,8 +71,11 @@ class Controller: @asyncio.coroutine def _ping(self): try: - return (yield from asyncio.wait_for( - self._ping_notimeout(), self.ping_timeout)) + ok = yield from asyncio.wait_for(self._ping_notimeout(), + self.ping_timeout) + if ok: + self.retry_timer_cur = self.retry_timer + return ok except: return False @@ -102,8 +107,9 @@ class Controller: logger.warning("Controller %s failed to start", self.name) else: logger.warning("Controller %s exited", self.name) - logger.warning("Restarting in %.1f seconds", self.retry_timer) - yield from asyncio.sleep(self.retry_timer) + logger.warning("Restarting in %.1f seconds", self.retry_timer_cur) + yield from asyncio.sleep(self.retry_timer_cur) + self.retry_timer_cur *= self.retry_timer_backoff except asyncio.CancelledError: yield from self._terminate() From 4b195663f6e1cbe95331d224ab653ef537cdb7ee Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sun, 9 Aug 2015 20:40:15 +0800 Subject: [PATCH 064/191] pc_rpc/Server: add built-in terminate --- artiq/protocols/pc_rpc.py | 35 ++++++++++++++++++++++++++--------- 1 file changed, 26 insertions(+), 9 deletions(-) diff --git a/artiq/protocols/pc_rpc.py b/artiq/protocols/pc_rpc.py index 1f9151a4f..0884903f1 100644 --- 
a/artiq/protocols/pc_rpc.py +++ b/artiq/protocols/pc_rpc.py @@ -240,7 +240,8 @@ class BestEffortClient: network errors are suppressed and connections are retried in the background. - RPC calls that failed because of network errors return ``None``. + RPC calls that failed because of network errors return ``None``. Other RPC + calls are blocking and return the correct value. :param firstcon_timeout: Timeout to use during the first (blocking) connection attempt at object initialization. @@ -397,12 +398,19 @@ class Server(_AsyncioServer): exposed to the client. Keys are names identifying each object. Clients select one of these objects using its name upon connection. :param id_parameters: An optional human-readable string giving more - information about the parameters of the server. + information about the server. + :param builtin_terminate: If set, the server provides a built-in + ``terminate`` method that unblocks any tasks waiting on + ``wait_terminate``. This is useful to handle server termination + requests from clients. """ - def __init__(self, targets, id_parameters=None): + def __init__(self, targets, id_parameters=None, builtin_terminate=False): _AsyncioServer.__init__(self) self.targets = targets self.id_parameters = id_parameters + self.builtin_terminate = builtin_terminate + if builtin_terminate: + self._terminate_request = asyncio.Event() @asyncio.coroutine def _handle_connection_cr(self, reader, writer): @@ -448,9 +456,13 @@ class Server(_AsyncioServer): obj = {"status": "ok", "ret": doc} elif obj["action"] == "call": logger.debug("calling %s", _PrettyPrintCall(obj)) - method = getattr(target, obj["name"]) - ret = method(*obj["args"], **obj["kwargs"]) - obj = {"status": "ok", "ret": ret} + if self.builtin_terminate and obj["name"] == "terminate": + self._terminate_request.set() + obj = {"status": "ok", "ret": None} + else: + method = getattr(target, obj["name"]) + ret = method(*obj["args"], **obj["kwargs"]) + obj = {"status": "ok", "ret": ret} else: raise ValueError("Unknown action: {}" .format(obj["action"])) @@ -462,18 +474,23 @@ class Server(_AsyncioServer): finally: writer.close() + @asyncio.coroutine + def wait_terminate(self): + yield from self._terminate_request.wait() + def simple_server_loop(targets, host, port, id_parameters=None): - """Runs a server until an exception is raised (e.g. the user hits Ctrl-C). + """Runs a server until an exception is raised (e.g. the user hits Ctrl-C) + or termination is requested by a client. See ``Server`` for a description of the parameters. 
""" loop = asyncio.get_event_loop() try: - server = Server(targets, id_parameters) + server = Server(targets, id_parameters, True) loop.run_until_complete(server.start(host, port)) try: - loop.run_forever() + loop.run_until_complete(server.wait_terminate()) finally: loop.run_until_complete(server.stop()) finally: From 54b11a392a17ed6a8a16461ce12c623fc822ccf8 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sun, 9 Aug 2015 20:41:11 +0800 Subject: [PATCH 065/191] ctlmgr: graceful controller termination --- artiq/frontend/artiq_ctlmgr.py | 32 +++++++++++++++++++++----------- 1 file changed, 21 insertions(+), 11 deletions(-) diff --git a/artiq/frontend/artiq_ctlmgr.py b/artiq/frontend/artiq_ctlmgr.py index 7b23ce747..ce59bc7a4 100755 --- a/artiq/frontend/artiq_ctlmgr.py +++ b/artiq/frontend/artiq_ctlmgr.py @@ -4,7 +4,6 @@ import asyncio import argparse import os import logging -import signal import shlex import socket @@ -46,6 +45,7 @@ class Controller: self.port = ddb_entry["port"] self.ping_timer = ddb_entry.get("ping_timer", 30) self.ping_timeout = ddb_entry.get("ping_timeout", 30) + self.term_timeout = ddb_entry.get("term_timeout", 30) self.retry_timer_cur = self.retry_timer self.process = None @@ -57,21 +57,21 @@ class Controller: yield from asyncio.wait_for(self.launch_task, None) @asyncio.coroutine - def _ping_notimeout(self): + def _call_controller(self, method): remote = AsyncioClient() yield from remote.connect_rpc(self.host, self.port, None) try: targets, _ = remote.get_rpc_id() remote.select_rpc_target(targets[0]) - ok = yield from remote.ping() + r = yield from getattr(remote, method)() finally: remote.close_rpc() - return ok + return r @asyncio.coroutine def _ping(self): try: - ok = yield from asyncio.wait_for(self._ping_notimeout(), + ok = yield from asyncio.wait_for(self._call_controller("ping"), self.ping_timeout) if ok: self.retry_timer_cur = self.retry_timer @@ -92,6 +92,8 @@ class Controller: logger.warning("Controller %s ping failed", self.name) yield from self._terminate() return + else: + break @asyncio.coroutine def launcher(self): @@ -117,14 +119,22 @@ class Controller: def _terminate(self): logger.info("Terminating controller %s", self.name) if self.process is not None and self.process.returncode is None: - self.process.send_signal(signal.SIGTERM) - logger.debug("Signal sent") try: - yield from asyncio_process_wait_timeout(self.process, 5.0) - except asyncio.TimeoutError: - logger.warning("Controller %s did not respond to SIGTERM", + yield from asyncio.wait_for(self._call_controller("terminate"), + self.term_timeout) + except: + logger.warning("Controller %s did not respond to terminate " + "command, killing", self.name) + self.process.kill() + try: + yield from asyncio_process_wait_timeout(self.process, + self.term_timeout) + except: + logger.warning("Controller %s failed to exit, killing", self.name) - self.process.send_signal(signal.SIGKILL) + self.process.kill() + yield from self.process.wait() + logger.debug("Controller %s terminated", self.name) def get_ip_addresses(host): From 95b56e85a328a89d773175828591c120265eaa4a Mon Sep 17 00:00:00 2001 From: whitequark Date: Sun, 9 Aug 2015 23:33:00 +0300 Subject: [PATCH 066/191] Add binutils patch. 
--- doc/manual/installing.rst | 12 ++++++-- ...ils-2.25.1-or1k-R_PCREL-pcrel_offset.patch | 29 +++++++++++++++++++ 2 files changed, 39 insertions(+), 2 deletions(-) create mode 100644 misc/binutils-2.25.1-or1k-R_PCREL-pcrel_offset.patch diff --git a/doc/manual/installing.rst b/doc/manual/installing.rst index 55400c3d3..995710427 100644 --- a/doc/manual/installing.rst +++ b/doc/manual/installing.rst @@ -81,6 +81,11 @@ and the ARTIQ kernels. $ mkdir ~/artiq-dev +* Clone ARTIQ repository: :: + + $ cd ~/artiq-dev + $ git clone --recursive https://github.com/m-labs/artiq + * Install OpenRISC binutils (or1k-linux-...): :: $ cd ~/artiq-dev @@ -88,8 +93,11 @@ and the ARTIQ kernels. $ tar xvf binutils-2.25.1.tar.bz2 $ rm binutils-2.25.1.tar.bz2 - $ mkdir binutils-2.25.1/build - $ cd binutils-2.25.1/build + $ cd binutils-2.25.1 + $ patch -p1 <~/artiq-dev/misc/binutils-2.25.1-or1k-R_PCREL-pcrel_offset.patch + + $ mkdir build + $ cd build $ ../configure --target=or1k-linux --prefix=/usr/local $ make -j4 $ sudo make install diff --git a/misc/binutils-2.25.1-or1k-R_PCREL-pcrel_offset.patch b/misc/binutils-2.25.1-or1k-R_PCREL-pcrel_offset.patch new file mode 100644 index 000000000..22bdc3d12 --- /dev/null +++ b/misc/binutils-2.25.1-or1k-R_PCREL-pcrel_offset.patch @@ -0,0 +1,29 @@ +--- a/bfd/elf32-or1k.c 2015-08-02 15:45:34.275922535 +0300 ++++ b/bfd/elf32-or1k.c 2015-08-02 15:45:36.635931622 +0300 +@@ -199,7 +199,7 @@ + FALSE, /* partial_inplace */ + 0, /* src_mask */ + 0xffffffff, /* dst_mask */ +- FALSE), /* pcrel_offset */ ++ TRUE), /* pcrel_offset */ + + HOWTO (R_OR1K_16_PCREL, + 0, /* rightshift */ +@@ -213,7 +213,7 @@ + FALSE, /* partial_inplace */ + 0, /* src_mask */ + 0xffff, /* dst_mask */ +- FALSE), /* pcrel_offset */ ++ TRUE), /* pcrel_offset */ + + HOWTO (R_OR1K_8_PCREL, + 0, /* rightshift */ +@@ -227,7 +227,7 @@ + FALSE, /* partial_inplace */ + 0, /* src_mask */ + 0xff, /* dst_mask */ +- FALSE), /* pcrel_offset */ ++ TRUE), /* pcrel_offset */ + + HOWTO (R_OR1K_GOTPC_HI16, /* Type. */ + 16, /* Rightshift. */ \ No newline at end of file From 9772676f2d58f8722cecfc02a2534a5a89142bd3 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Mon, 10 Aug 2015 15:16:52 +0800 Subject: [PATCH 067/191] doc: cleanup shell prompts --- doc/manual/installing.rst | 2 +- doc/manual/management_system.rst | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/doc/manual/installing.rst b/doc/manual/installing.rst index 55400c3d3..0f4720ca2 100644 --- a/doc/manual/installing.rst +++ b/doc/manual/installing.rst @@ -286,7 +286,7 @@ This should be done after either installation methods (conda or source). You should see something like this in the serial console: :: - ~/dev/misoc$ ./tools/flterm --port /dev/ttyUSB1 + $ ./tools/flterm --port /dev/ttyUSB1 [FLTERM] Starting... MiSoC BIOS http://m-labs.hk diff --git a/doc/manual/management_system.rst b/doc/manual/management_system.rst index 353e77575..e5f83280a 100644 --- a/doc/manual/management_system.rst +++ b/doc/manual/management_system.rst @@ -42,9 +42,9 @@ The master may use a Git repository for the storage of experiment source code. U Even though the master also supports non-bare repositories, it is recommended to use a bare repository so that it can easily support push transactions from clients. 
Create it with e.g.: :: - mkdir experiments - cd experiments - git init --bare + $ mkdir experiments + $ cd experiments + $ git init --bare You want Git to notify the master every time the repository is pushed to (updated), so that it is rescanned for experiments and e.g. the GUI controls and the experiment list are updated. @@ -55,11 +55,11 @@ Create a file named ``post-receive`` in the ``hooks`` folder (this folder has be Then set the execution permission on it: :: - chmod 755 hooks/post-receive + $ chmod 755 hooks/post-receive You may now run the master with the Git support enabled: :: - artiq_master -g -r /path_to/experiments + $ artiq_master -g -r /path_to/experiments Push commits containing experiments to the bare repository using e.g. Git over SSH, and the new experiments should automatically appear in the GUI. From d9d74661c1eadcc618647ca4193065792c2bec4e Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Mon, 10 Aug 2015 15:17:02 +0800 Subject: [PATCH 068/191] doc: scheduling --- doc/manual/management_system.rst | 42 ++++++++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) diff --git a/doc/manual/management_system.rst b/doc/manual/management_system.rst index e5f83280a..8d972a12d 100644 --- a/doc/manual/management_system.rst +++ b/doc/manual/management_system.rst @@ -35,6 +35,48 @@ The GUI client connects to the master and is the main way of interacting with it Experiment scheduling ********************* +Basics +------ + +To use hardware resources more efficiently, potentially compute-intensive pre-computation and analysis phases of other experiments is executed in parallel with the body of the current experiment that accesses the hardware. + +Experiments are divided into three phases that are programmed by the user: + +1. The preparation stage, that pre-fetches and pre-computes any data that necessary to run the experiment. Users may implement this stage by overloading the ``prepare`` method. It is not permitted to access hardware in this stage, as doing so may conflict with other experiments using the same devices. +2. The running stage, that corresponds to the body of the experiment, and typically accesses hardware. Users must implement this stage and overload the ``run`` method. +3. The analysis stage, where raw results collected in the running stage are post-processed and may lead to updates of the parameter database. This stage may be implemented by overloading the ``analyze`` method. + +.. note:: Only the ``run`` method implementation is mandatory; if the experiment does not fit into the pipelined scheduling model, it can leave one or both of the other methods empty (which is the default). + +The three phases of several experiments are then executed in a pipelined manner by the scheduler in the ARTIQ master: experiment A executes its preparation stage, then experiment A executes its running stage while experiment B executes its preparation stage, and so on. + +Priorities and timed runs +------------------------- + +When determining what experiment to begin executing next (i.e. entering the preparation stage), the scheduling looks at the following factors, by decreasing order of precedence: + +1. Experiments may be scheduled with a due date. If there is one and it is not reached yet, the experiment is not eligible for preparation. +2. The integer priority value specified by the user. +3. The due date itself. The earlier the due date, the earlier the experiment is scheduled. +4. 
The run identifier (RID), an integer that is incremented at each experiment submission. This ensures that, all other things being equal, experiments are scheduled in the same order as they are submitted. + +Pauses +------ + +In the run stage, an experiment may yield to the scheduler by calling the ``pause`` method. If there are other experiments with higher priority (e.g. a high-priority timed experiment has reached its due date), they are preemptively executed, and then the ``pause`` method returns. Otherwise, the ``pause`` method returns immediately. + +The experiment must place the hardware in a safe state and disconnect from the core device (typically, by using ``self.core.comm.close()``) before calling ``pause``. + +Accessing the ``pause`` method is done through a virtual device called ``scheduler`` that is accessible to all experiments. The scheduler virtual device is requested like regular devices using ``get_device`` or ``attr_device``. + +Multiple pipelines +------------------ + +Multiple pipelines can operate in parallel inside the same master. It is the responsibility of the user to ensure that experiments scheduled in one pipeline will never conflict with those of another pipeline over resources (e.g. same devices). + +Pipelines are identified by their name, and are automatically created (when an experiment is scheduled with a pipeline name that does not exist) and destroyed (when it runs empty). + + Git integration *************** From 393576fc015a41221a6d8abcec079f9a3cf89f8b Mon Sep 17 00:00:00 2001 From: Yann Sionneau Date: Mon, 10 Aug 2015 12:00:10 +0200 Subject: [PATCH 069/191] conda: add missing build.sh for llvmdev-or1k pkg --- conda/llvmdev-or1k/build.sh | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 conda/llvmdev-or1k/build.sh diff --git a/conda/llvmdev-or1k/build.sh b/conda/llvmdev-or1k/build.sh new file mode 100644 index 000000000..9310d567d --- /dev/null +++ b/conda/llvmdev-or1k/build.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +cd tools +git clone https://github.com/openrisc/clang-or1k clang +cd .. +mkdir build +cd build +cmake .. 
-DCMAKE_INSTALL_PREFIX=$PREFIX -DLLVM_TARGETS_TO_BUILD="OR1K;X86" -DCMAKE_BUILD_TYPE=Debug +make -j2 +make install From c57ce6d75006c211d7be4331a0a89103c6c268eb Mon Sep 17 00:00:00 2001 From: Yann Sionneau Date: Mon, 10 Aug 2015 12:23:05 +0200 Subject: [PATCH 070/191] conda: llvmdev should be built in Release mode with assertions enabled --- conda/llvmdev-or1k/bld.bat | 3 ++- conda/llvmdev-or1k/build.sh | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/conda/llvmdev-or1k/bld.bat b/conda/llvmdev-or1k/bld.bat index ef75e9db1..2c93638ee 100644 --- a/conda/llvmdev-or1k/bld.bat +++ b/conda/llvmdev-or1k/bld.bat @@ -11,7 +11,8 @@ set CMAKE_GENERATOR_TOOLSET=v120_xp @rem Reduce build times and package size by removing unused stuff set CMAKE_CUSTOM=-DLLVM_TARGETS_TO_BUILD=OR1K -DLLVM_INCLUDE_TESTS=OFF ^ -DLLVM_INCLUDE_TOOLS=OFF -DLLVM_INCLUDE_UTILS=OFF ^ --DLLVM_INCLUDE_DOCS=OFF -DLLVM_INCLUDE_EXAMPLES=OFF +-DLLVM_INCLUDE_DOCS=OFF -DLLVM_INCLUDE_EXAMPLES=OFF ^ +-DLLVM_ENABLE_ASSERTIONS=ON cmake -G "%CMAKE_GENERATOR%" -T "%CMAKE_GENERATOR_TOOLSET%" ^ -DCMAKE_BUILD_TYPE="%BUILD_CONFIG%" -DCMAKE_PREFIX_PATH=%LIBRARY_PREFIX% ^ -DCMAKE_INSTALL_PREFIX:PATH=%LIBRARY_PREFIX% %CMAKE_CUSTOM% %SRC_DIR% diff --git a/conda/llvmdev-or1k/build.sh b/conda/llvmdev-or1k/build.sh index 9310d567d..391f592cc 100644 --- a/conda/llvmdev-or1k/build.sh +++ b/conda/llvmdev-or1k/build.sh @@ -5,6 +5,6 @@ git clone https://github.com/openrisc/clang-or1k clang cd .. mkdir build cd build -cmake .. -DCMAKE_INSTALL_PREFIX=$PREFIX -DLLVM_TARGETS_TO_BUILD="OR1K;X86" -DCMAKE_BUILD_TYPE=Debug +cmake .. -DCMAKE_INSTALL_PREFIX=$PREFIX -DLLVM_TARGETS_TO_BUILD="OR1K;X86" -DCMAKE_BUILD_TYPE=Rel -DLLVM_ENABLE_ASSERTIONS=ON make -j2 make install From 52de6311a44077d97412e1fa1fbc197969f3f96a Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Mon, 10 Aug 2015 20:07:24 +0800 Subject: [PATCH 071/191] test/scheduler: add repo_msg --- artiq/test/scheduler.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/artiq/test/scheduler.py b/artiq/test/scheduler.py index 60bbd83c3..580214725 100644 --- a/artiq/test/scheduler.py +++ b/artiq/test/scheduler.py @@ -37,7 +37,8 @@ def _get_basic_steps(rid, expid, priority=0, flush=False): return [ {"action": "setitem", "key": rid, "value": {"pipeline": "main", "status": "pending", "priority": priority, - "expid": expid, "due_date": None, "flush": flush}, + "expid": expid, "due_date": None, "flush": flush, + "repo_msg": None}, "path": []}, {"action": "setitem", "key": "status", "value": "preparing", "path": [rid]}, @@ -89,7 +90,8 @@ class SchedulerCase(unittest.TestCase): expect.insert(0, {"action": "setitem", "key": 0, "value": {"pipeline": "main", "status": "pending", "priority": 99, - "expid": expid, "due_date": late, "flush": False}, + "expid": expid, "due_date": late, "flush": False, + "repo_msg": None}, "path": []}) scheduler.submit("main", expid, 99, late, False) From b700f591f17c37873931a5ff9db3b8ae065dfa6a Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Mon, 10 Aug 2015 20:07:39 +0800 Subject: [PATCH 072/191] protocols/pc_rpc: add missing import --- artiq/protocols/pc_rpc.py | 1 + 1 file changed, 1 insertion(+) diff --git a/artiq/protocols/pc_rpc.py b/artiq/protocols/pc_rpc.py index 0884903f1..1787a2111 100644 --- a/artiq/protocols/pc_rpc.py +++ b/artiq/protocols/pc_rpc.py @@ -18,6 +18,7 @@ import threading import time import logging import inspect +from operator import itemgetter from artiq.protocols import pyon from 
artiq.protocols.asyncio_server import AsyncioServer as _AsyncioServer From 06badd1dc12595deaa12ccf2f4731acdef7cfec9 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Mon, 10 Aug 2015 21:58:11 +0800 Subject: [PATCH 073/191] scheduler: refactor, fix pipeline hazards --- artiq/master/scheduler.py | 203 ++++++++++++++++++++++---------------- artiq/test/scheduler.py | 2 +- artiq/tools.py | 47 ++++----- 3 files changed, 137 insertions(+), 115 deletions(-) diff --git a/artiq/master/scheduler.py b/artiq/master/scheduler.py index fefa8539f..0e1b0f1b6 100644 --- a/artiq/master/scheduler.py +++ b/artiq/master/scheduler.py @@ -4,8 +4,7 @@ from enum import Enum from time import time from artiq.master.worker import Worker -from artiq.tools import (asyncio_wait_or_cancel, asyncio_queue_peek, - TaskObject, WaitSet) +from artiq.tools import asyncio_wait_or_cancel, TaskObject, Condition from artiq.protocols.sync_struct import Notifier @@ -20,7 +19,7 @@ class RunStatus(Enum): running = 4 run_done = 5 analyzing = 6 - analyze_done = 7 + deleting = 7 paused = 8 @@ -48,7 +47,7 @@ def _mk_worker_method(name): class Run: def __init__(self, rid, pipeline_name, wd, expid, priority, due_date, flush, - worker_handlers, notifier, **kwargs): + pool, **kwargs): # called through pool self.rid = rid self.pipeline_name = pipeline_name @@ -58,7 +57,7 @@ class Run: self.due_date = due_date self.flush = flush - self.worker = Worker(worker_handlers) + self.worker = Worker(pool.worker_handlers) self._status = RunStatus.pending @@ -71,8 +70,9 @@ class Run: "status": self._status.name } notification.update(kwargs) - self._notifier = notifier + self._notifier = pool.notifier self._notifier[self.rid] = notification + self._state_changed = pool.state_changed @property def status(self): @@ -83,6 +83,7 @@ class Run: self._status = value if not self.worker.closed.is_set(): self._notifier[self.rid]["status"] = self._status.name + self._state_changed.notify() # The run with the largest priority_key is to be scheduled first def priority_key(self, now=None): @@ -130,28 +131,27 @@ class RIDCounter: class RunPool: def __init__(self, ridc, worker_handlers, notifier, repo_backend): self.runs = dict() - self.submitted_cb = None + self.state_changed = Condition() - self._ridc = ridc - self._worker_handlers = worker_handlers - self._notifier = notifier - self._repo_backend = repo_backend + self.ridc = ridc + self.worker_handlers = worker_handlers + self.notifier = notifier + self.repo_backend = repo_backend def submit(self, expid, priority, due_date, flush, pipeline_name): - # mutates expid to insert head repository revision if None - # called through scheduler - rid = self._ridc.get() + # mutates expid to insert head repository revision if None. + # called through scheduler. 
+ rid = self.ridc.get() if "repo_rev" in expid: if expid["repo_rev"] is None: - expid["repo_rev"] = self._repo_backend.get_head_rev() - wd, repo_msg = self._repo_backend.request_rev(expid["repo_rev"]) + expid["repo_rev"] = self.repo_backend.get_head_rev() + wd, repo_msg = self.repo_backend.request_rev(expid["repo_rev"]) else: wd, repo_msg = None, None run = Run(rid, pipeline_name, wd, expid, priority, due_date, flush, - self._worker_handlers, self._notifier, repo_msg=repo_msg) + self, repo_msg=repo_msg) self.runs[rid] = run - if self.submitted_cb is not None: - self.submitted_cb() + self.state_changed.notify() return rid @asyncio.coroutine @@ -162,47 +162,72 @@ class RunPool: run = self.runs[rid] yield from run.close() if "repo_rev" in run.expid: - self._repo_backend.release_rev(run.expid["repo_rev"]) + self.repo_backend.release_rev(run.expid["repo_rev"]) del self.runs[rid] class PrepareStage(TaskObject): - def __init__(self, flush_tracker, delete_cb, pool, outq): - self.flush_tracker = flush_tracker - self.delete_cb = delete_cb + def __init__(self, pool, delete_cb): self.pool = pool - self.outq = outq + self.delete_cb = delete_cb - self.pool_submitted = asyncio.Event() - self.pool.submitted_cb = lambda: self.pool_submitted.set() + def _get_run(self): + """If a run should get prepared now, return it. + Otherwise, return a float representing the time before the next timed + run becomes due, or None if there is no such run.""" + now = time() + pending_runs = filter(lambda r: r.status == RunStatus.pending, + self.pool.runs.values()) + try: + candidate = max(pending_runs, key=lambda r: r.priority_key(now)) + except ValueError: + # pending_runs is an empty sequence + return None + + prepared_runs = filter(lambda r: r.status == RunStatus.prepare_done, + self.pool.runs.values()) + try: + top_prepared_run = max(prepared_runs, + key=lambda r: r.priority_key()) + except ValueError: + # there are no existing prepared runs - go ahead with + pass + else: + # prepare (as well) only if it has higher priority than + # the highest priority prepared run + if top_prepared_run.priority_key() >= candidate.priority_key(): + return None + + if candidate.due_date is None or candidate.due_date < now: + return candidate + else: + return candidate.due_date - now @asyncio.coroutine - def _push_runs(self): - """Pushes all runs that have no due date of have a due date in the - past. 
- - Returns the time before the next schedulable run, or None if the - pool is empty.""" + def _do(self): while True: - now = time() - pending_runs = filter(lambda r: r.status == RunStatus.pending, - self.pool.runs.values()) - try: - run = max(pending_runs, key=lambda r: r.priority_key(now)) - except ValueError: - # pending_runs is an empty sequence - return None - if run.due_date is None or run.due_date < now: + run = self._get_run() + if run is None: + yield from self.pool.state_changed.wait() + elif isinstance(run, float): + yield from asyncio_wait_or_cancel([self.pool.state_changed.wait()], + timeout=run) + else: if run.flush: run.status = RunStatus.flushing - yield from asyncio_wait_or_cancel( - [self.flush_tracker.wait_empty(), - run.worker.closed.wait()], - return_when=asyncio.FIRST_COMPLETED) + while not all(r.status in (RunStatus.pending, + RunStatus.deleting) + or r is run + for r in self.pool.runs.values()): + ev = [self.pool.state_changed.wait(), + run.worker.closed.wait()] + yield from asyncio_wait_or_cancel( + ev, return_when=asyncio.FIRST_COMPLETED) + if run.worker.closed.is_set(): + break if run.worker.closed.is_set(): - continue + continue run.status = RunStatus.preparing - self.flush_tracker.add(run.rid) try: yield from run.build() yield from run.prepare() @@ -211,44 +236,38 @@ class PrepareStage(TaskObject): "deleting RID %d", run.rid, exc_info=True) self.delete_cb(run.rid) - run.status = RunStatus.prepare_done - yield from self.outq.put(run) - else: - return run.due_date - now - - @asyncio.coroutine - def _do(self): - while True: - next_timed_in = yield from self._push_runs() - if next_timed_in is None: - # pool is empty - wait for something to be added to it - yield from self.pool_submitted.wait() - else: - # wait for next_timed_in seconds, or until the pool is modified - yield from asyncio_wait_or_cancel([self.pool_submitted.wait()], - timeout=next_timed_in) - self.pool_submitted.clear() + else: + run.status = RunStatus.prepare_done class RunStage(TaskObject): - def __init__(self, delete_cb, inq, outq): + def __init__(self, pool, delete_cb): + self.pool = pool self.delete_cb = delete_cb - self.inq = inq - self.outq = outq + + def _get_run(self): + prepared_runs = filter(lambda r: r.status == RunStatus.prepare_done, + self.pool.runs.values()) + try: + r = max(prepared_runs, key=lambda r: r.priority_key()) + except ValueError: + # prepared_runs is an empty sequence + r = None + return r @asyncio.coroutine def _do(self): stack = [] while True: - try: - next_irun = asyncio_queue_peek(self.inq) - except asyncio.QueueEmpty: - next_irun = None + next_irun = self._get_run() if not stack or ( next_irun is not None and next_irun.priority_key() > stack[-1].priority_key()): - stack.append((yield from self.inq.get())) + while next_irun is None: + yield from self.pool.state_changed.wait() + next_irun = self._get_run() + stack.append(next_irun) run = stack.pop() try: @@ -266,21 +285,33 @@ class RunStage(TaskObject): else: if completed: run.status = RunStatus.run_done - yield from self.outq.put(run) else: run.status = RunStatus.paused stack.append(run) class AnalyzeStage(TaskObject): - def __init__(self, delete_cb, inq): + def __init__(self, pool, delete_cb): + self.pool = pool self.delete_cb = delete_cb - self.inq = inq + + def _get_run(self): + run_runs = filter(lambda r: r.status == RunStatus.run_done, + self.pool.runs.values()) + try: + r = max(run_runs, key=lambda r: r.priority_key()) + except ValueError: + # run_runs is an empty sequence + r = None + return r @asyncio.coroutine 
def _do(self): while True: - run = yield from self.inq.get() + run = self._get_run() + while run is None: + yield from self.pool.state_changed.wait() + run = self._get_run() run.status = RunStatus.analyzing try: yield from run.analyze() @@ -290,22 +321,16 @@ class AnalyzeStage(TaskObject): "deleting RID %d", run.rid, exc_info=True) self.delete_cb(run.rid) - run.status = RunStatus.analyze_done - self.delete_cb(run.rid) + else: + self.delete_cb(run.rid) class Pipeline: def __init__(self, ridc, deleter, worker_handlers, notifier, repo_backend): - flush_tracker = WaitSet() - def delete_cb(rid): - deleter.delete(rid) - flush_tracker.discard(rid) self.pool = RunPool(ridc, worker_handlers, notifier, repo_backend) - self._prepare = PrepareStage(flush_tracker, delete_cb, - self.pool, asyncio.Queue(maxsize=1)) - self._run = RunStage(delete_cb, - self._prepare.outq, asyncio.Queue(maxsize=1)) - self._analyze = AnalyzeStage(delete_cb, self._run.outq) + self._prepare = PrepareStage(self.pool, deleter.delete) + self._run = RunStage(self.pool, deleter.delete) + self._analyze = AnalyzeStage(self.pool, deleter.delete) def start(self): self._prepare.start() @@ -327,6 +352,10 @@ class Deleter(TaskObject): def delete(self, rid): logger.debug("delete request for RID %d", rid) + for pipeline in self._pipelines.values(): + if rid in pipeline.pool.runs: + pipeline.pool.runs[rid].status = RunStatus.deleting + break self._queue.put_nowait(rid) @asyncio.coroutine diff --git a/artiq/test/scheduler.py b/artiq/test/scheduler.py index 580214725..a3783863e 100644 --- a/artiq/test/scheduler.py +++ b/artiq/test/scheduler.py @@ -50,7 +50,7 @@ def _get_basic_steps(rid, expid, priority=0, flush=False): "path": [rid]}, {"action": "setitem", "key": "status", "value": "analyzing", "path": [rid]}, - {"action": "setitem", "key": "status", "value": "analyze_done", + {"action": "setitem", "key": "status", "value": "deleting", "path": [rid]}, {"action": "delitem", "key": rid, "path": []} ] diff --git a/artiq/tools.py b/artiq/tools.py index de98adecd..844cbd291 100644 --- a/artiq/tools.py +++ b/artiq/tools.py @@ -5,6 +5,7 @@ import logging import sys import asyncio import time +import collections import os.path from artiq.language.environment import is_experiment @@ -125,14 +126,6 @@ def asyncio_wait_or_cancel(fs, **kwargs): return fs -def asyncio_queue_peek(q): - """Like q.get_nowait(), but does not remove the item from the queue.""" - if q._queue: - return q._queue[0] - else: - raise asyncio.QueueEmpty - - class TaskObject: def start(self): self.task = asyncio.async(self._do()) @@ -151,25 +144,25 @@ class TaskObject: raise NotImplementedError -class WaitSet: - def __init__(self): - self._s = set() - self._ev = asyncio.Event() - - def _update_ev(self): - if self._s: - self._ev.clear() +class Condition: + def __init__(self, *, loop=None): + if loop is not None: + self._loop = loop else: - self._ev.set() - - def add(self, e): - self._s.add(e) - self._update_ev() - - def discard(self, e): - self._s.discard(e) - self._update_ev() + self._loop = asyncio.get_event_loop() + self._waiters = collections.deque() @asyncio.coroutine - def wait_empty(self): - yield from self._ev.wait() + def wait(self): + """Wait until notified.""" + fut = asyncio.Future(loop=self._loop) + self._waiters.append(fut) + try: + yield from fut + finally: + self._waiters.remove(fut) + + def notify(self): + for fut in self._waiters: + if not fut.done(): + fut.set_result(False) From 80e8928c70565208cc44f398557d93bd7612ae94 Mon Sep 17 00:00:00 2001 From: Yann Sionneau 
Date: Mon, 10 Aug 2015 20:33:49 +0200 Subject: [PATCH 074/191] conda: llvmlite-artiq has been rebuilt with an updated version --- conda/llvmlite-artiq/meta.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/conda/llvmlite-artiq/meta.yaml b/conda/llvmlite-artiq/meta.yaml index d59db9375..af5253bea 100644 --- a/conda/llvmlite-artiq/meta.yaml +++ b/conda/llvmlite-artiq/meta.yaml @@ -15,7 +15,7 @@ requirements: - python build: - number: 1 + number: 2 test: imports: From b70b2252d55810d76f98760b76b463533482973a Mon Sep 17 00:00:00 2001 From: Yann Sionneau Date: Tue, 11 Aug 2015 12:55:41 +0200 Subject: [PATCH 075/191] conda: add pygit2+libgit2 recipes + pygit2 dependency in artiq pkg --- conda/artiq/meta.yaml | 1 + conda/libgit2/build.sh | 7 +++++++ conda/libgit2/meta.yaml | 20 ++++++++++++++++++++ conda/pygit2/build.sh | 2 ++ conda/pygit2/meta.yaml | 28 ++++++++++++++++++++++++++++ 5 files changed, 58 insertions(+) create mode 100644 conda/libgit2/build.sh create mode 100644 conda/libgit2/meta.yaml create mode 100644 conda/pygit2/build.sh create mode 100644 conda/pygit2/meta.yaml diff --git a/conda/artiq/meta.yaml b/conda/artiq/meta.yaml index 67991b4b9..3de0d7250 100644 --- a/conda/artiq/meta.yaml +++ b/conda/artiq/meta.yaml @@ -48,6 +48,7 @@ requirements: - quamash - pyqtgraph - flterm # [linux] + - pygit2 test: imports: diff --git a/conda/libgit2/build.sh b/conda/libgit2/build.sh new file mode 100644 index 000000000..210fa20ae --- /dev/null +++ b/conda/libgit2/build.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +mkdir build +cd build +cmake .. -DCMAKE_INSTALL_PREFIX=$PREFIX +make -j2 +make install diff --git a/conda/libgit2/meta.yaml b/conda/libgit2/meta.yaml new file mode 100644 index 000000000..cfad84964 --- /dev/null +++ b/conda/libgit2/meta.yaml @@ -0,0 +1,20 @@ +package: + name: libgit2 + version: 0.22.3 + +source: + git_url: https://github.com/libgit2/libgit2 + git_tag: v0.22.3 + +build: + number: 0 + +requirements: + build: + - system # [linux] + - cmake # [linux] + +about: + home: https://libgit2.github.com/ + license: GPLv2 with a special Linking Exception + summary: 'libgit2 is a portable, pure C implementation of the Git core methods provided as a re-entrant linkable library with a solid API, allowing you to write native speed custom Git applications in any language with bindings.' diff --git a/conda/pygit2/build.sh b/conda/pygit2/build.sh new file mode 100644 index 000000000..833768d01 --- /dev/null +++ b/conda/pygit2/build.sh @@ -0,0 +1,2 @@ +export LIBGIT2=$PREFIX +$PYTHON setup.py install diff --git a/conda/pygit2/meta.yaml b/conda/pygit2/meta.yaml new file mode 100644 index 000000000..273c6f444 --- /dev/null +++ b/conda/pygit2/meta.yaml @@ -0,0 +1,28 @@ +package: + name: pygit2 + version: 0.22.1 + +source: + git_url: https://github.com/libgit2/pygit2 + git_tag: v0.22.1 + +build: + number: 0 + +requirements: + build: + - system # [linux] + - python + - libgit2 + - cffi >=0.8.1 + - pkgconfig # [linux] + run: + - system # [linux] + - python + - libgit2 + - cffi >=0.8.1 + +about: + home: http://www.pygit2.org/ + license: GPLv2 with a special Linking Exception + summary: 'Pygit2 is a set of Python bindings to the libgit2 shared library, libgit2 implements the core of Git.' 
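The new recipes can be built locally before relying on the published conda packages. A minimal sketch, assuming conda-build is installed and the repository is cloned at ~/artiq-dev/artiq as in the installation instructions, with libgit2 built before pygit2 since pygit2 depends on it: ::

    $ cd ~/artiq-dev/artiq
    $ conda build conda/libgit2
    $ conda build conda/pygit2

The resulting packages land in the conda-bld directory of the conda prefix and can be installed with ``conda install --use-local``.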
From 075bf331ac440dcab49ee73a522acfe2230a0104 Mon Sep 17 00:00:00 2001 From: Yann Sionneau Date: Tue, 11 Aug 2015 14:05:41 +0200 Subject: [PATCH 076/191] travis: use more recent libstdc++ --- .travis.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.travis.yml b/.travis.yml index 7f3498cb7..0892bc27c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,6 +2,12 @@ language: python python: - '3.4' sudo: false +addons: + apt: + sources: + - ubuntu-toolchain-r-test + packages: + - libstdc++6 env: global: - BUILD_SOC=1 From 80805407bf0d78dcd86f7499c08d04425e16c76c Mon Sep 17 00:00:00 2001 From: Yann Sionneau Date: Tue, 11 Aug 2015 16:44:22 +0200 Subject: [PATCH 077/191] conda: add Windows support for pygit2 and libgit2 packages --- conda/libgit2/bld.bat | 20 ++++++++++++++++++++ conda/pygit2/bld.bat | 3 +++ 2 files changed, 23 insertions(+) create mode 100644 conda/libgit2/bld.bat create mode 100644 conda/pygit2/bld.bat diff --git a/conda/libgit2/bld.bat b/conda/libgit2/bld.bat new file mode 100644 index 000000000..c385a825c --- /dev/null +++ b/conda/libgit2/bld.bat @@ -0,0 +1,20 @@ +mkdir build +cd build +REM Configure step +if "%ARCH%"=="32" ( +set CMAKE_GENERATOR=Visual Studio 12 2013 +) else ( +set CMAKE_GENERATOR=Visual Studio 12 2013 Win64 +) +set CMAKE_GENERATOR_TOOLSET=v120_xp +cmake -G "%CMAKE_GENERATOR%" -DCMAKE_INSTALL_PREFIX=%PREFIX% -DSTDCALL=OFF %SRC_DIR% +if errorlevel 1 exit 1 +REM Build step +cmake --build . +if errorlevel 1 exit 1 +REM Install step +cmake --build . --target install +if errorlevel 1 exit 1 +REM Hack to help pygit2 to find libgit2 +mkdir %PREFIX%\Scripts +copy "%PREFIX%\bin\git2.dll" "%PREFIX%\Scripts\" \ No newline at end of file diff --git a/conda/pygit2/bld.bat b/conda/pygit2/bld.bat new file mode 100644 index 000000000..0b9010888 --- /dev/null +++ b/conda/pygit2/bld.bat @@ -0,0 +1,3 @@ +set LIBGIT2=%PREFIX% +set VS100COMNTOOLS=%VS120COMNTOOLS% +%PYTHON% setup.py install \ No newline at end of file From a6ab066c87d2d24200020952f7c4ecbf6f02b2b9 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Tue, 11 Aug 2015 23:22:36 +0800 Subject: [PATCH 078/191] ctlmgr: support immediate controller retry --- artiq/frontend/artiq_ctlmgr.py | 113 +++++++++++++++++---------- doc/manual/default_network_ports.rst | 2 + 2 files changed, 72 insertions(+), 43 deletions(-) diff --git a/artiq/frontend/artiq_ctlmgr.py b/artiq/frontend/artiq_ctlmgr.py index ce59bc7a4..878d6e649 100755 --- a/artiq/frontend/artiq_ctlmgr.py +++ b/artiq/frontend/artiq_ctlmgr.py @@ -1,6 +1,7 @@ #!/usr/bin/env python3 import asyncio +import atexit import argparse import os import logging @@ -8,9 +9,9 @@ import shlex import socket from artiq.protocols.sync_struct import Subscriber -from artiq.protocols.pc_rpc import AsyncioClient +from artiq.protocols.pc_rpc import AsyncioClient, Server from artiq.tools import verbosity_args, init_logger -from artiq.tools import asyncio_process_wait_timeout +from artiq.tools import TaskObject, asyncio_process_wait_timeout, Condition logger = logging.getLogger(__name__) @@ -29,8 +30,11 @@ def get_argparser(): "--retry-master", default=5.0, type=float, help="retry timer for reconnecting to master") parser.add_argument( - "--retry-command", default=5.0, type=float, - help="retry timer for restarting a controller command") + "--bind", default="::1", + help="hostname or IP address to bind to") + parser.add_argument( + "--bind-port", default=3249, type=int, + help="TCP port to listen to for control (default: %(default)d)") return parser @@ -48,6 +52,7 @@ class 
Controller: self.term_timeout = ddb_entry.get("term_timeout", 30) self.retry_timer_cur = self.retry_timer + self.retry_now = Condition() self.process = None self.launch_task = asyncio.Task(self.launcher()) @@ -109,8 +114,13 @@ class Controller: logger.warning("Controller %s failed to start", self.name) else: logger.warning("Controller %s exited", self.name) - logger.warning("Restarting in %.1f seconds", self.retry_timer_cur) - yield from asyncio.sleep(self.retry_timer_cur) + logger.warning("Restarting in %.1f seconds", + self.retry_timer_cur) + try: + yield from asyncio.wait_for(self.retry_now.wait(), + self.retry_timer_cur) + except asyncio.TimeoutError: + pass self.retry_timer_cur *= self.retry_timer_backoff except asyncio.CancelledError: yield from self._terminate() @@ -208,34 +218,47 @@ class ControllerDB: return self.current_controllers -@asyncio.coroutine -def ctlmgr(server, port, retry_master): - controller_db = ControllerDB() - try: - subscriber = Subscriber("devices", controller_db.sync_struct_init) - while True: - try: - def set_host_filter(): - s = subscriber.writer.get_extra_info("socket") - localhost = s.getsockname()[0] - controller_db.set_host_filter(localhost) - yield from subscriber.connect(server, port, set_host_filter) +class ControllerManager(TaskObject): + def __init__(self, server, port, retry_master): + self.server = server + self.port = port + self.retry_master = retry_master + self.controller_db = ControllerDB() + + @asyncio.coroutine + def _do(self): + try: + subscriber = Subscriber("devices", + self.controller_db.sync_struct_init) + while True: try: - yield from asyncio.wait_for(subscriber.receive_task, None) - finally: - yield from subscriber.close() - except (ConnectionAbortedError, ConnectionError, - ConnectionRefusedError, ConnectionResetError) as e: - logger.warning("Connection to master failed (%s: %s)", - e.__class__.__name__, str(e)) - else: - logger.warning("Connection to master lost") - logger.warning("Retrying in %.1f seconds", retry_master) - yield from asyncio.sleep(retry_master) - except asyncio.CancelledError: - pass - finally: - yield from controller_db.current_controllers.shutdown() + def set_host_filter(): + s = subscriber.writer.get_extra_info("socket") + localhost = s.getsockname()[0] + self.controller_db.set_host_filter(localhost) + yield from subscriber.connect(self.server, self.port, + set_host_filter) + try: + yield from asyncio.wait_for(subscriber.receive_task, None) + finally: + yield from subscriber.close() + except (ConnectionAbortedError, ConnectionError, + ConnectionRefusedError, ConnectionResetError) as e: + logger.warning("Connection to master failed (%s: %s)", + e.__class__.__name__, str(e)) + else: + logger.warning("Connection to master lost") + logger.warning("Retrying in %.1f seconds", self.retry_master) + yield from asyncio.sleep(self.retry_master) + except asyncio.CancelledError: + pass + finally: + yield from self.controller_db.current_controllers.shutdown() + + def retry_now(self, k): + """If a controller is disabled and pending retry, perform that retry + now.""" + self.controller_db.current_controllers.active[k].retry_now.notify() def main(): @@ -247,18 +270,22 @@ def main(): asyncio.set_event_loop(loop) else: loop = asyncio.get_event_loop() + atexit.register(lambda: loop.close()) - try: - task = asyncio.Task(ctlmgr( - args.server, args.port, args.retry_master)) - try: - loop.run_forever() - finally: - task.cancel() - loop.run_until_complete(asyncio.wait_for(task, None)) + ctlmgr = ControllerManager(args.server, 
args.port, args.retry_master) + ctlmgr.start() + atexit.register(lambda: loop.run_until_complete(ctlmgr.stop())) + + class CtlMgrRPC: + retry_now = ctlmgr.retry_now + + rpc_target = CtlMgrRPC() + rpc_server = Server({"ctlmgr": rpc_target}, builtin_terminate=True) + loop.run_until_complete(rpc_server.start(args.bind, args.bind_port)) + atexit.register(lambda: loop.run_until_complete(rpc_server.stop())) + + loop.run_until_complete(rpc_server.wait_terminate()) - finally: - loop.close() if __name__ == "__main__": main() diff --git a/doc/manual/default_network_ports.rst b/doc/manual/default_network_ports.rst index c5728e428..f9c4f9c09 100644 --- a/doc/manual/default_network_ports.rst +++ b/doc/manual/default_network_ports.rst @@ -8,6 +8,8 @@ Default network ports +--------------------------+--------------+ | Core device (mon/inj) | 3250 (UDP) | +--------------------------+--------------+ +| Controller manager | 3249 | ++--------------------------+--------------+ | Master (notifications) | 3250 | +--------------------------+--------------+ | Master (control) | 3251 | From 998db5121bf00c79e31dabf5153539826b814904 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Tue, 11 Aug 2015 23:29:52 +0800 Subject: [PATCH 079/191] pc_rpc: id_parameters -> description --- artiq/frontend/artiq_rpctool.py | 10 +++++----- artiq/frontend/pdq2_controller.py | 2 +- artiq/protocols/pc_rpc.py | 28 ++++++++++++++-------------- 3 files changed, 20 insertions(+), 20 deletions(-) diff --git a/artiq/frontend/artiq_rpctool.py b/artiq/frontend/artiq_rpctool.py index a26d70cf7..a8a166891 100755 --- a/artiq/frontend/artiq_rpctool.py +++ b/artiq/frontend/artiq_rpctool.py @@ -29,10 +29,10 @@ def get_argparser(): return parser -def list_targets(target_names, id_parameters): +def list_targets(target_names, description): print("Target(s): " + ", ".join(target_names)) - if id_parameters is not None: - print("Parameters: " + id_parameters) + if description is not None: + print("Description: " + description) def list_methods(remote): @@ -85,7 +85,7 @@ def main(): remote = Client(args.server, args.port, None) - targets, id_parameters = remote.get_rpc_id() + targets, description = remote.get_rpc_id() if args.action != "list-targets": # If no target specified and remote has only one, then use this one. 
@@ -99,7 +99,7 @@ def main(): remote.select_rpc_target(args.target) if args.action == "list-targets": - list_targets(targets, id_parameters) + list_targets(targets, description) elif args.action == "list-methods": list_methods(remote) elif args.action == "call": diff --git a/artiq/frontend/pdq2_controller.py b/artiq/frontend/pdq2_controller.py index 577cdce2b..f84a6a404 100755 --- a/artiq/frontend/pdq2_controller.py +++ b/artiq/frontend/pdq2_controller.py @@ -39,7 +39,7 @@ def main(): dev = Pdq2(url=args.device, dev=port) try: simple_server_loop({"pdq2": dev}, args.bind, args.port, - id_parameters="device=" + str(args.device)) + description="device=" + str(args.device)) finally: dev.close() diff --git a/artiq/protocols/pc_rpc.py b/artiq/protocols/pc_rpc.py index 1787a2111..d99c62803 100644 --- a/artiq/protocols/pc_rpc.py +++ b/artiq/protocols/pc_rpc.py @@ -79,7 +79,7 @@ class Client: server_identification = self.__recv() self.__target_names = server_identification["targets"] - self.__id_parameters = server_identification["parameters"] + self.__description = server_identification["description"] if target_name is not None: self.select_rpc_target(target_name) except: @@ -94,9 +94,9 @@ class Client: self.__socket.sendall((target_name + "\n").encode()) def get_rpc_id(self): - """Returns a tuple (target_names, id_parameters) containing the + """Returns a tuple (target_names, description) containing the identification information of the server.""" - return (self.__target_names, self.__id_parameters) + return (self.__target_names, self.__description) def close_rpc(self): """Closes the connection to the RPC server. @@ -157,7 +157,7 @@ class AsyncioClient: self.__reader = None self.__writer = None self.__target_names = None - self.__id_parameters = None + self.__description = None @asyncio.coroutine def connect_rpc(self, host, port, target_name): @@ -170,7 +170,7 @@ class AsyncioClient: self.__writer.write(_init_string) server_identification = yield from self.__recv() self.__target_names = server_identification["targets"] - self.__id_parameters = server_identification["parameters"] + self.__description = server_identification["description"] if target_name is not None: self.select_rpc_target(target_name) except: @@ -186,9 +186,9 @@ class AsyncioClient: self.__writer.write((target_name + "\n").encode()) def get_rpc_id(self): - """Returns a tuple (target_names, id_parameters) containing the + """Returns a tuple (target_names, description) containing the identification information of the server.""" - return (self.__target_names, self.__id_parameters) + return (self.__target_names, self.__description) def close_rpc(self): """Closes the connection to the RPC server. @@ -199,7 +199,7 @@ class AsyncioClient: self.__reader = None self.__writer = None self.__target_names = None - self.__id_parameters = None + self.__description = None def __send(self, obj): line = pyon.encode(obj) + "\n" @@ -398,17 +398,17 @@ class Server(_AsyncioServer): :param targets: A dictionary of objects providing the RPC methods to be exposed to the client. Keys are names identifying each object. Clients select one of these objects using its name upon connection. - :param id_parameters: An optional human-readable string giving more + :param description: An optional human-readable string giving more information about the server. :param builtin_terminate: If set, the server provides a built-in ``terminate`` method that unblocks any tasks waiting on ``wait_terminate``. This is useful to handle server termination requests from clients. 
""" - def __init__(self, targets, id_parameters=None, builtin_terminate=False): + def __init__(self, targets, description=None, builtin_terminate=False): _AsyncioServer.__init__(self) self.targets = targets - self.id_parameters = id_parameters + self.description = description self.builtin_terminate = builtin_terminate if builtin_terminate: self._terminate_request = asyncio.Event() @@ -422,7 +422,7 @@ class Server(_AsyncioServer): obj = { "targets": sorted(self.targets.keys()), - "parameters": self.id_parameters + "description": self.description } line = pyon.encode(obj) + "\n" writer.write(line.encode()) @@ -480,7 +480,7 @@ class Server(_AsyncioServer): yield from self._terminate_request.wait() -def simple_server_loop(targets, host, port, id_parameters=None): +def simple_server_loop(targets, host, port, description=None): """Runs a server until an exception is raised (e.g. the user hits Ctrl-C) or termination is requested by a client. @@ -488,7 +488,7 @@ def simple_server_loop(targets, host, port, id_parameters=None): """ loop = asyncio.get_event_loop() try: - server = Server(targets, id_parameters, True) + server = Server(targets, description, True) loop.run_until_complete(server.start(host, port)) try: loop.run_until_complete(server.wait_terminate()) From f073dfaee51d466ff1f7d953e87ce0aabc901ce5 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Thu, 13 Aug 2015 12:20:12 +0800 Subject: [PATCH 080/191] ttl: add input/output doc --- artiq/coredevice/ttl.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/artiq/coredevice/ttl.py b/artiq/coredevice/ttl.py index 866e884e5..b655a256d 100644 --- a/artiq/coredevice/ttl.py +++ b/artiq/coredevice/ttl.py @@ -87,10 +87,12 @@ class TTLInOut: @kernel def output(self): + """Set the direction to output.""" self.set_oe(True) @kernel def input(self): + """Set the direction to input.""" self.set_oe(False) @kernel @@ -107,12 +109,16 @@ class TTLInOut: @kernel def on(self): - """Set the output to a logic high state.""" + """Set the output to a logic high state. + + The channel must be in output mode.""" self.set_o(True) @kernel def off(self): - """Set the output to a logic low state.""" + """Set the output to a logic low state. + + The channel must be in output mode.""" self.set_o(False) @kernel From a1c7efd0aee48b119e744588ee1315bef520ec37 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Thu, 13 Aug 2015 14:47:05 +0800 Subject: [PATCH 081/191] doc: use m-labs anaconda account --- doc/manual/installing.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/manual/installing.rst b/doc/manual/installing.rst index a6f7bc256..9d7523205 100644 --- a/doc/manual/installing.rst +++ b/doc/manual/installing.rst @@ -25,7 +25,7 @@ If it prints the help of the ``conda`` command, your install is OK. 
If not, then make sure your ``$PATH`` environment variable contains the path to anaconda3/bin (or miniconda3/bin):: $ echo $PATH - /home/fallen/miniconda3/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games + /home/..../miniconda3/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games If your ``$PATH`` misses reference the miniconda3/bin or anaconda3/bin you can fix this by typing:: @@ -36,7 +36,7 @@ Installing the host side software For this, you need to add our binstar repository to your conda configuration:: - $ conda config --add channels http://conda.anaconda.org/fallen/channel/dev + $ conda config --add channels http://conda.anaconda.org/m-labs/channel/dev Then you can install the ARTIQ package, it will pull all the necessary dependencies:: From f2911d67b7e62d6d750aa9cbe72b79fc5fa99b7d Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Thu, 13 Aug 2015 18:32:37 +0800 Subject: [PATCH 082/191] Enable TCP keepalive on the core device Automatically runs the idle experiment a few seconds after the master stops responding. Thanks Florent for figuring out TCP_KEEPIDLE_DEFAULT needed to be set in addition to the other options. Closes #31 --- soc/runtime/liblwip/lwipopts.h | 4 ++++ soc/runtime/net_server.c | 17 ++++++++++------- 2 files changed, 14 insertions(+), 7 deletions(-) diff --git a/soc/runtime/liblwip/lwipopts.h b/soc/runtime/liblwip/lwipopts.h index bca6722a3..147322dd8 100644 --- a/soc/runtime/liblwip/lwipopts.h +++ b/soc/runtime/liblwip/lwipopts.h @@ -105,6 +105,10 @@ a lot of data that needs to be copied, this should be set high. */ /* ---------- TCP options ---------- */ #define LWIP_TCP 1 +#define LWIP_TCP_KEEPALIVE 1 +#define TCP_KEEPIDLE_DEFAULT 1250 +#define TCP_KEEPINTVL_DEFAULT 1000 +#define TCP_KEEPCNT_DEFAULT 3 #define TCP_TTL 255 /* Controls if TCP should queue segments that arrive out of diff --git a/soc/runtime/net_server.c b/soc/runtime/net_server.c index a28b396bd..fa06474b6 100644 --- a/soc/runtime/net_server.c +++ b/soc/runtime/net_server.c @@ -60,14 +60,16 @@ static void net_server_close(struct net_server_connstate *cs, struct tcp_pcb *pc active_pcb = NULL; } - /* lwip loves to call back with broken pointers. Prevent that. */ - tcp_arg(pcb, NULL); - tcp_recv(pcb, NULL); - tcp_sent(pcb, NULL); - tcp_err(pcb, NULL); + if(pcb) { + /* lwip loves to call back with broken pointers. Prevent that. 
*/ + tcp_arg(pcb, NULL); + tcp_recv(pcb, NULL); + tcp_sent(pcb, NULL); + tcp_err(pcb, NULL); + tcp_close(pcb); + } cs_free(cs); - tcp_close(pcb); } static err_t net_server_recv(void *arg, struct tcp_pcb *pcb, struct pbuf *p, err_t err) @@ -155,7 +157,7 @@ static void net_server_err(void *arg, err_t err) struct net_server_connstate *cs; cs = (struct net_server_connstate *)arg; - cs_free(cs); + net_server_close(cs, NULL); } static struct tcp_pcb *listen_pcb; @@ -177,6 +179,7 @@ static err_t net_server_accept(void *arg, struct tcp_pcb *newpcb, err_t err) void net_server_init(void) { listen_pcb = tcp_new(); + listen_pcb->so_options |= SOF_KEEPALIVE; tcp_bind(listen_pcb, IP_ADDR_ANY, 1381); listen_pcb = tcp_listen(listen_pcb); tcp_accept(listen_pcb, net_server_accept); From 1ce8bbe3ae798cfb992e91550f507a484e3471e3 Mon Sep 17 00:00:00 2001 From: Yann Sionneau Date: Thu, 13 Aug 2015 18:50:13 +0200 Subject: [PATCH 083/191] conda: add recipe for libssh2 package --- conda/libssh2/bld.bat | 17 +++++++++++++++++ conda/libssh2/build.sh | 7 +++++++ conda/libssh2/meta.yaml | 23 +++++++++++++++++++++++ 3 files changed, 47 insertions(+) create mode 100644 conda/libssh2/bld.bat create mode 100644 conda/libssh2/build.sh create mode 100644 conda/libssh2/meta.yaml diff --git a/conda/libssh2/bld.bat b/conda/libssh2/bld.bat new file mode 100644 index 000000000..636211e40 --- /dev/null +++ b/conda/libssh2/bld.bat @@ -0,0 +1,17 @@ +mkdir build +cd build +REM Configure step +if "%ARCH%"=="32" ( +set CMAKE_GENERATOR=Visual Studio 12 2013 +) else ( +set CMAKE_GENERATOR=Visual Studio 12 2013 Win64 +) +set CMAKE_GENERATOR_TOOLSET=v120_xp +cmake -G "%CMAKE_GENERATOR%" -DCMAKE_INSTALL_PREFIX=%PREFIX% -DOPENSSL_ROOT_DIR=%PREFIX%\Library\ %SRC_DIR% +if errorlevel 1 exit 1 +REM Build step +cmake --build . +if errorlevel 1 exit 1 +REM Install step +cmake --build . --target install +if errorlevel 1 exit 1 \ No newline at end of file diff --git a/conda/libssh2/build.sh b/conda/libssh2/build.sh new file mode 100644 index 000000000..a604361f2 --- /dev/null +++ b/conda/libssh2/build.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +mkdir build +cd build +cmake .. -DCMAKE_INSTALL_PREFIX=$PREFIX -DOPENSSL_ROOT_DIR=$PREFIX +make -j2 +make install diff --git a/conda/libssh2/meta.yaml b/conda/libssh2/meta.yaml new file mode 100644 index 000000000..9a65c8f5d --- /dev/null +++ b/conda/libssh2/meta.yaml @@ -0,0 +1,23 @@ +package: + name: libssh2 + version: 1.6.0 + +source: + git_url: https://github.com/libssh2/libssh2 + git_tag: libssh2-1.6.0 + +build: + number: 0 + +requirements: + build: + - system # [linux] + - cmake # [linux] + - openssl + run: + - openssl + +about: + home: http://www.libssh2.org/ + license: BSD + summary: 'libssh2 is a client-side C library implementing the SSH2 protocol' From 12b315700999555b520ed86b277cc4baaa847be7 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Fri, 14 Aug 2015 10:37:40 +0800 Subject: [PATCH 084/191] doc: add precision about Git commit management --- doc/manual/management_system.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/management_system.rst b/doc/manual/management_system.rst index 8d972a12d..b52325945 100644 --- a/doc/manual/management_system.rst +++ b/doc/manual/management_system.rst @@ -105,7 +105,7 @@ You may now run the master with the Git support enabled: :: Push commits containing experiments to the bare repository using e.g. Git over SSH, and the new experiments should automatically appear in the GUI. -.. 
note:: If you plan to run the ARTIQ system entirely on a single machine, you may also consider using a non-bare repository and the ``post-commit`` hook to trigger repository scans every time you commit changes (locally). The ARTIQ master never uses the repository's working directory, but only what is committed. +.. note:: If you plan to run the ARTIQ system entirely on a single machine, you may also consider using a non-bare repository and the ``post-commit`` hook to trigger repository scans every time you commit changes (locally). The ARTIQ master never uses the repository's working directory, but only what is committed. More precisely, it fetches by default the last (atomically) completed commit at the time of experiment submission and checks it out in a temporary folder (which solves the problem of concurrent repository access). The GUI always runs experiments from the repository. The command-line client, by default, runs experiment from the raw filesystem (which is useful for iterating rapidly without creating many disorganized commits). If you want to use the repository instead, simply pass the ``-R`` option. From 966d0ee3f255ea75c6fa1b38d674ff83f889d9e4 Mon Sep 17 00:00:00 2001 From: Yann Sionneau Date: Fri, 14 Aug 2015 10:36:03 +0200 Subject: [PATCH 085/191] pxi6733: fix verification of the number of buffered sample values --- artiq/devices/pxi6733/driver.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/artiq/devices/pxi6733/driver.py b/artiq/devices/pxi6733/driver.py index 22839ee04..2d139dc85 100644 --- a/artiq/devices/pxi6733/driver.py +++ b/artiq/devices/pxi6733/driver.py @@ -115,9 +115,9 @@ class DAQmx: ret = t.WriteAnalogF64(samps_per_channel, False, 0, self.daq.DAQmx_Val_GroupByChannel, values, byref(num_samps_written), None) - if num_samps_written.value != nb_values: - raise IOError("Error: only {} sample values were written" - .format(num_samps_written.value)) + if num_samps_written.value != samps_per_channel: + raise IOError("Error: only {} sample values per channel were" + "written".format(num_samps_written.value)) if ret: raise IOError("Error while writing samples to the channel buffer") From 4ac79fb1003839247c148e132c1ed34efdc39097 Mon Sep 17 00:00:00 2001 From: Yann Sionneau Date: Fri, 14 Aug 2015 16:56:46 +0200 Subject: [PATCH 086/191] conda: update libssh2 libgit2 pygit2 to use anaconda libs instead of system ones --- conda/libgit2/bld.bat | 2 +- conda/libgit2/build.sh | 2 +- conda/libgit2/meta.yaml | 9 ++++++++- conda/libssh2/bld.bat | 2 +- conda/libssh2/build.sh | 2 +- conda/libssh2/meta.yaml | 2 +- conda/pygit2/meta.yaml | 2 +- 7 files changed, 14 insertions(+), 7 deletions(-) diff --git a/conda/libgit2/bld.bat b/conda/libgit2/bld.bat index c385a825c..268c18cd9 100644 --- a/conda/libgit2/bld.bat +++ b/conda/libgit2/bld.bat @@ -7,7 +7,7 @@ set CMAKE_GENERATOR=Visual Studio 12 2013 set CMAKE_GENERATOR=Visual Studio 12 2013 Win64 ) set CMAKE_GENERATOR_TOOLSET=v120_xp -cmake -G "%CMAKE_GENERATOR%" -DCMAKE_INSTALL_PREFIX=%PREFIX% -DSTDCALL=OFF %SRC_DIR% +cmake -G "%CMAKE_GENERATOR%" -DCMAKE_INSTALL_PREFIX=%PREFIX% -DSTDCALL=OFF -DCMAKE_PREFIX_PATH=$PREFIX %SRC_DIR% if errorlevel 1 exit 1 REM Build step cmake --build . diff --git a/conda/libgit2/build.sh b/conda/libgit2/build.sh index 210fa20ae..dc4a85aa0 100644 --- a/conda/libgit2/build.sh +++ b/conda/libgit2/build.sh @@ -2,6 +2,6 @@ mkdir build cd build -cmake .. -DCMAKE_INSTALL_PREFIX=$PREFIX +cmake .. 
-DCMAKE_INSTALL_PREFIX=$PREFIX -DCMAKE_PREFIX_PATH=$PREFIX make -j2 make install diff --git a/conda/libgit2/meta.yaml b/conda/libgit2/meta.yaml index cfad84964..5741b44b4 100644 --- a/conda/libgit2/meta.yaml +++ b/conda/libgit2/meta.yaml @@ -7,12 +7,19 @@ source: git_tag: v0.22.3 build: - number: 0 + number: 1 requirements: build: - system # [linux] - cmake # [linux] + - openssl + - libssh2 + - zlib + run: + - openssl + - zlib + - libssh2 about: home: https://libgit2.github.com/ diff --git a/conda/libssh2/bld.bat b/conda/libssh2/bld.bat index 636211e40..ed957bd42 100644 --- a/conda/libssh2/bld.bat +++ b/conda/libssh2/bld.bat @@ -7,7 +7,7 @@ set CMAKE_GENERATOR=Visual Studio 12 2013 set CMAKE_GENERATOR=Visual Studio 12 2013 Win64 ) set CMAKE_GENERATOR_TOOLSET=v120_xp -cmake -G "%CMAKE_GENERATOR%" -DCMAKE_INSTALL_PREFIX=%PREFIX% -DOPENSSL_ROOT_DIR=%PREFIX%\Library\ %SRC_DIR% +cmake -G "%CMAKE_GENERATOR%" -DCMAKE_INSTALL_PREFIX=%PREFIX% -DOPENSSL_ROOT_DIR=%PREFIX%\Library -DBUILD_SHARED_LIBS=ON -DBUILD_TESTING=OFF -DBUILD_EXAMPLES=OFF -DCMAKE_PREFIX_PATH=$PREFIX %SRC_DIR% if errorlevel 1 exit 1 REM Build step cmake --build . diff --git a/conda/libssh2/build.sh b/conda/libssh2/build.sh index a604361f2..773dda78b 100644 --- a/conda/libssh2/build.sh +++ b/conda/libssh2/build.sh @@ -2,6 +2,6 @@ mkdir build cd build -cmake .. -DCMAKE_INSTALL_PREFIX=$PREFIX -DOPENSSL_ROOT_DIR=$PREFIX +cmake .. -DCMAKE_INSTALL_PREFIX=$PREFIX -DOPENSSL_ROOT_DIR=$PREFIX -DBUILD_SHARED_LIBS=ON -DBUILD_TESTING=OFF -DBUILD_EXAMPLES=OFF -DCMAKE_PREFIX_PATH=$PREFIX make -j2 make install diff --git a/conda/libssh2/meta.yaml b/conda/libssh2/meta.yaml index 9a65c8f5d..28c0f59b6 100644 --- a/conda/libssh2/meta.yaml +++ b/conda/libssh2/meta.yaml @@ -7,7 +7,7 @@ source: git_tag: libssh2-1.6.0 build: - number: 0 + number: 1 requirements: build: diff --git a/conda/pygit2/meta.yaml b/conda/pygit2/meta.yaml index 273c6f444..fcc222f29 100644 --- a/conda/pygit2/meta.yaml +++ b/conda/pygit2/meta.yaml @@ -7,7 +7,7 @@ source: git_tag: v0.22.1 build: - number: 0 + number: 1 requirements: build: From e6f3285144cff2b1a536f4e5a6562ba8efa2cd35 Mon Sep 17 00:00:00 2001 From: Yann Sionneau Date: Fri, 14 Aug 2015 18:18:42 +0200 Subject: [PATCH 087/191] travis: use the new m-labs anaconda repository during CI --- .travis/get-anaconda.sh | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.travis/get-anaconda.sh b/.travis/get-anaconda.sh index a4c2524b9..146c7b84f 100755 --- a/.travis/get-anaconda.sh +++ b/.travis/get-anaconda.sh @@ -9,5 +9,4 @@ conda update -q conda conda info -a conda install conda-build jinja2 conda create -q -n py34 python=$TRAVIS_PYTHON_VERSION -conda config --add channels fallen -conda config --add channels https://conda.anaconda.org/fallen/channel/dev +conda config --add channels https://conda.anaconda.org/m-labs/channel/dev From cf74cd0f38d7079fae0659b4d8375d3d71ca0c03 Mon Sep 17 00:00:00 2001 From: Yann Sionneau Date: Fri, 14 Aug 2015 18:59:30 +0200 Subject: [PATCH 088/191] travis: dont overwrite conda env PATH, escape dollar signs, fixes #84 --- .travis/get-toolchain.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis/get-toolchain.sh b/.travis/get-toolchain.sh index fdf8195d1..9b0ca48cc 100755 --- a/.travis/get-toolchain.sh +++ b/.travis/get-toolchain.sh @@ -21,8 +21,8 @@ done export PATH=$PWD/packages/usr/local/llvm-or1k/bin:$PWD/packages/usr/local/bin:$PWD/packages/usr/bin:$PATH export 
LD_LIBRARY_PATH=$PWD/packages/usr/lib/x86_64-linux-gnu:$PWD/packages/usr/local/x86_64-unknown-linux-gnu/or1k-elf/lib:$LD_LIBRARY_PATH -echo "export LD_LIBRARY_PATH=$LD_LIBRARY_PATH" >> $HOME/.mlabs/build_settings.sh -echo "export PATH=$PWD/packages/usr/local/llvm-or1k/bin:$PATH" >> $HOME/.mlabs/build_settings.sh +echo "export LD_LIBRARY_PATH=$PWD/packages/usr/lib/x86_64-linux-gnu:$PWD/packages/usr/local/x86_64-unknown-linux-gnu/or1k-elf/lib:\$LD_LIBRARY_PATH" >> $HOME/.mlabs/build_settings.sh +echo "export PATH=$PWD/packages/usr/local/llvm-or1k/bin:$PWD/packages/usr/local/bin:$PWD/packages/usr/bin:\$PATH" >> $HOME/.mlabs/build_settings.sh or1k-linux-as --version llc --version From 8669f1d61db7cacc676db464ffbcd54b76e3e8e0 Mon Sep 17 00:00:00 2001 From: Yann Sionneau Date: Fri, 14 Aug 2015 21:43:46 +0200 Subject: [PATCH 089/191] manual: llvmlite-artiq conda package needs a recent libstdc++6 --- doc/manual/installing.rst | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/doc/manual/installing.rst b/doc/manual/installing.rst index 9d7523205..b95dadb42 100644 --- a/doc/manual/installing.rst +++ b/doc/manual/installing.rst @@ -42,6 +42,25 @@ Then you can install the ARTIQ package, it will pull all the necessary dependenc $ conda install artiq +.. note:: + You need to have a recent libstdc++6. + If you are installing on e.g. Ubuntu 14.04 LTS: you need to update your + libstdc++6. + + If the following command outputs `GLIBCXX_3.4.20`, then you don't need to + upgrade:: + + $ strings /usr/lib/x86_64-linux-gnu/libstdc++.so.6 | grep GLIBCXX_3.4.20 + GLIBCXX_3.4.20 + + Adapt the previous command to the location of libstdc++ on your system. + + To upgrade, on Ubuntu you need to do the following:: + + $ sudo add-apt-repository ppa:ubuntu-toolchain-r/test + $ sudo apt-get update + $ sudo apt-get install libstdc++6 + Preparing the core device FPGA board ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ From 34a9c8c12d3eb37dfba8456f04c3f759c35abeb5 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sat, 15 Aug 2015 09:16:00 +0800 Subject: [PATCH 090/191] artiq_run: add dummy pause method (closes #100) --- artiq/frontend/artiq_run.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/artiq/frontend/artiq_run.py b/artiq/frontend/artiq_run.py index 06595746c..976e3edf6 100755 --- a/artiq/frontend/artiq_run.py +++ b/artiq/frontend/artiq_run.py @@ -51,6 +51,9 @@ class DummyScheduler: def delete(self, rid): logger.info("Deleting RID %s", rid) + def pause(self): + pass + def get_argparser(with_file=True): parser = argparse.ArgumentParser( From 0be0b199c14f654b29272f943c62e59a70d77e7e Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sat, 15 Aug 2015 15:29:41 +0800 Subject: [PATCH 091/191] gui: save/restore state of pyqtgraph plots (closes #98) --- artiq/gui/displays.py | 21 ++++++++++++++++++++- artiq/gui/results.py | 12 ++++++++++-- 2 files changed, 30 insertions(+), 3 deletions(-) diff --git a/artiq/gui/displays.py b/artiq/gui/displays.py index 512159c7e..a08aed041 100644 --- a/artiq/gui/displays.py +++ b/artiq/gui/displays.py @@ -94,6 +94,12 @@ class NumberDisplay(dockarea.Dock): n = "---" self.number.display(n) + def save_state(self): + return None + + def restore_state(self, state): + pass + class XYDisplaySettings(_SimpleSettings): _window_title = "XY plot" @@ -155,6 +161,12 @@ class XYDisplay(dockarea.Dock): if fit is not None: self.plot.plot(x, fit) + def save_state(self): + return self.plot.saveState() + + def restore_state(self, state): + self.plot.restoreState(state) + class 
HistogramDisplaySettings(_SimpleSettings): _window_title = "Histogram" @@ -191,7 +203,14 @@ class HistogramDisplay(dockarea.Dock): if y and len(x) == len(y) + 1: self.plot.clear() - self.plot.plot(x, y, stepMode=True, fillLevel=0, brush=(0, 0, 255, 150)) + self.plot.plot(x, y, stepMode=True, fillLevel=0, + brush=(0, 0, 255, 150)) + + def save_state(self): + return self.plot.saveState() + + def restore_state(self, state): + self.plot.restoreState(state) display_types = OrderedDict([ diff --git a/artiq/gui/results.py b/artiq/gui/results.py index 9c4c321c8..0ed872a6a 100644 --- a/artiq/gui/results.py +++ b/artiq/gui/results.py @@ -120,20 +120,28 @@ class ResultsDock(dockarea.Dock): dsp.sigClosed.connect(on_close) self.dock_area.addDock(dsp) self.dock_area.floatDock(dsp) + return dsp def save_state(self): r = dict() for name, display in self.displays.items(): r[name] = { "ty": _get_display_type_name(type(display)), - "settings": display.settings + "settings": display.settings, + "state": display.save_state() } return r def restore_state(self, state): for name, desc in state.items(): try: - self.create_display(desc["ty"], None, name, desc["settings"]) + dsp = self.create_display(desc["ty"], None, name, + desc["settings"]) except: logger.warning("Failed to create display '%s'", name, exc_info=True) + try: + dsp.restore_state(desc["state"]) + except: + logger.warning("Failed to restore display state of '%s'", + name, exc_info=True) From dee844510c526810d58fe56adbf25de5c41768e9 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sat, 15 Aug 2015 16:03:00 +0800 Subject: [PATCH 092/191] comm_tcp: enable TCP keepalive on host side as well --- artiq/coredevice/comm_tcp.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/artiq/coredevice/comm_tcp.py b/artiq/coredevice/comm_tcp.py index eda672750..f61212128 100644 --- a/artiq/coredevice/comm_tcp.py +++ b/artiq/coredevice/comm_tcp.py @@ -1,5 +1,6 @@ import logging import socket +import sys from artiq.coredevice.comm_generic import CommGeneric @@ -7,6 +8,22 @@ from artiq.coredevice.comm_generic import CommGeneric logger = logging.getLogger(__name__) +def set_keepalive(sock, after_idle, interval, max_fails): + if sys.platform.startswith("linux"): + sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, after_idle) + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, interval) + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, max_fails) + elif sys.platform.startswith("win") or sys.platform.startswith("cygwin"): + # setting max_fails is not supported, typically ends up being 5 or 10 + # depending on Windows version + sock.ioctl(socket.SIO_KEEPALIVE_VALS, + (1, after_idle*1000, interval*1000)) + else: + logger.warning("TCP keepalive not supported on platform '%s', ignored", + sys.platform) + + class Comm(CommGeneric): def __init__(self, dmgr, host, port=1381): self.host = host @@ -16,6 +33,7 @@ class Comm(CommGeneric): if hasattr(self, "socket"): return self.socket = socket.create_connection((self.host, self.port)) + set_keepalive(self.socket, 3, 2, 3) logger.debug("connected to host %s on port %d", self.host, self.port) self.write(b"ARTIQ coredev\n") From fd3fefec52a97bd88ce19676fe22dd49c2fa492c Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Mon, 17 Aug 2015 15:44:40 +0800 Subject: [PATCH 093/191] add InfluxDB bridge --- artiq/frontend/artiq_influxdb.py | 206 +++++++++++++++++++++++++++ doc/manual/default_network_ports.rst | 2 + 
doc/manual/utilities.rst | 7 + setup.py | 8 +- 4 files changed, 222 insertions(+), 1 deletion(-) create mode 100755 artiq/frontend/artiq_influxdb.py diff --git a/artiq/frontend/artiq_influxdb.py b/artiq/frontend/artiq_influxdb.py new file mode 100755 index 000000000..af93070c4 --- /dev/null +++ b/artiq/frontend/artiq_influxdb.py @@ -0,0 +1,206 @@ +#!/usr/bin/env python3 + +import argparse +import logging +import asyncio +import atexit +import fnmatch +from functools import partial + +import aiohttp + +from artiq.tools import verbosity_args, init_logger +from artiq.tools import TaskObject +from artiq.protocols.sync_struct import Subscriber +from artiq.protocols.pc_rpc import Server +from artiq.protocols import pyon + + +logger = logging.getLogger(__name__) + + +def get_argparser(): + parser = argparse.ArgumentParser( + description="ARTIQ data to InfluxDB bridge") + group = parser.add_argument_group("master") + group.add_argument( + "--server-master", default="::1", + help="hostname or IP of the master to connect to") + group.add_argument( + "--port-master", default=3250, type=int, + help="TCP port to use to connect to the master") + group.add_argument( + "--retry-master", default=5.0, type=float, + help="retry timer for reconnecting to master") + group = parser.add_argument_group("database") + group.add_argument( + "--baseurl-db", default="http://localhost:8086", + help="base URL to access InfluxDB (default: %(default)s)") + group.add_argument( + "--user-db", default="", help="InfluxDB username") + group.add_argument( + "--password-db", default="", help="InfluxDB password") + group.add_argument( + "--database", default="db", help="database name to use") + group.add_argument( + "--table", default="lab", help="table name to use") + group = parser.add_argument_group("filter") + group.add_argument( + "--bind", default="::1", + help="hostname or IP address to bind to") + group.add_argument( + "--bind-port", default=3248, type=int, + help="TCP port to listen to for control (default: %(default)d)") + group.add_argument( + "--filter-file", default="influxdb_filter.pyon", + help="file to save the filter in (default: %(default)s)") + verbosity_args(parser) + return parser + + +class DBWriter(TaskObject): + def __init__(self, base_url, user, password, database, table): + self.base_url = base_url + self.user = user + self.password = password + self.database = database + self.table = table + + self._queue = asyncio.Queue(100) + + def update(self, k, v): + try: + self._queue.put_nowait((k, v)) + except asyncio.QueueFull: + logger.warning("failed to update parameter '%s': " + "too many pending updates", k) + + @asyncio.coroutine + def _do(self): + while True: + k, v = yield from self._queue.get() + url = self.base_url + "/write" + params = {"u": self.user, "p": self.password, "db": self.database, + "consistency": "any", "precision": "n"} + data = "{} {}={}".format(self.table, k, v) + try: + response = yield from aiohttp.request( + "POST", url, params=params, data=data) + except: + logger.warning("got exception trying to update '%s'", + k, exc_info=True) + else: + if response.status not in (200, 204): + logger.warning("got HTTP status %d trying to update '%s'", + response.status, k) + response.close() + + +class Parameters: + def __init__(self, filter_function, writer, init): + self.filter_function = filter_function + self.writer = writer + + def __setitem__(self, k, v): + try: + v = float(v) + except: + pass + else: + if self.filter_function(k): + self.writer.update(k, v) + + +class 
MasterReader(TaskObject): + def __init__(self, server, port, retry, filter_function, writer): + self.server = server + self.port = port + self.retry = retry + + self.filter_function = filter_function + self.writer = writer + + @asyncio.coroutine + def _do(self): + subscriber = Subscriber( + "parameters", + partial(Parameters, self.filter_function, self.writer)) + while True: + try: + yield from subscriber.connect(self.server, self.port) + try: + yield from asyncio.wait_for(subscriber.receive_task, None) + finally: + yield from subscriber.close() + except (ConnectionAbortedError, ConnectionError, + ConnectionRefusedError, ConnectionResetError) as e: + logger.warning("Connection to master failed (%s: %s)", + e.__class__.__name__, str(e)) + else: + logger.warning("Connection to master lost") + logger.warning("Retrying in %.1f seconds", self.retry) + yield from asyncio.sleep(self.retry) + + +class Filter: + def __init__(self, filter_file): + self.filter_file = filter_file + self.filter = [] + try: + self.filter = pyon.load_file(self.filter_file) + except FileNotFoundError: + logger.info("no filter file found, using empty filter") + + def _save(self): + pyon.store_file(self.filter_file, self.filter) + + def _filter(self, k): + for pattern in self.filter: + if fnmatch.fnmatchcase(k, pattern): + return False + return True + + def add_pattern(self, pattern): + """Add a name pattern to ignore.""" + if pattern not in self.filter: + self.filter.append(pattern) + self._save() + + def remove_pattern(self, pattern): + """Remove a pattern name to ignore.""" + self.pattern.remove(pattern) + self._save() + + def get_patterns(self): + """Show ignore patterns.""" + return self.filter + + +def main(): + args = get_argparser().parse_args() + init_logger(args) + + loop = asyncio.get_event_loop() + atexit.register(lambda: loop.close()) + + writer = DBWriter(args.baseurl_db, + args.user_db, args.password_db, + args.database, args.table) + writer.start() + atexit.register(lambda: loop.run_until_complete(writer.stop())) + + filter = Filter(args.filter_file) + rpc_server = Server({"influxdb_filter": filter}, builtin_terminate=True) + loop.run_until_complete(rpc_server.start(args.bind, args.bind_port)) + atexit.register(lambda: loop.run_until_complete(rpc_server.stop())) + + reader = MasterReader(args.server_master, args.port_master, + args.retry_master, filter._filter, writer) + reader.start() + atexit.register(lambda: loop.run_until_complete(reader.stop())) + + loop.run_until_complete(rpc_server.wait_terminate()) + + +if __name__ == "__main__": + main() diff --git a/doc/manual/default_network_ports.rst b/doc/manual/default_network_ports.rst index f9c4f9c09..35e576d2b 100644 --- a/doc/manual/default_network_ports.rst +++ b/doc/manual/default_network_ports.rst @@ -8,6 +8,8 @@ Default network ports +--------------------------+--------------+ | Core device (mon/inj) | 3250 (UDP) | +--------------------------+--------------+ +| InfluxDB bridge | 3248 | ++--------------------------+--------------+ | Controller manager | 3249 | +--------------------------+--------------+ | Master (notifications) | 3250 | diff --git a/doc/manual/utilities.rst b/doc/manual/utilities.rst index 23df5b581..306ebe0f9 100644 --- a/doc/manual/utilities.rst +++ b/doc/manual/utilities.rst @@ -152,3 +152,10 @@ it:: .. argparse:: :ref: artiq.frontend.artiq_coretool.get_argparser :prog: artiq_coretool + +Data to InfluxDB bridge +----------------------- + +.. 
argparse:: + :ref: artiq.frontend.artiq_influxdb.get_argparser + :prog: artiq_influxdb diff --git a/setup.py b/setup.py index 763320836..cbbb51de6 100755 --- a/setup.py +++ b/setup.py @@ -8,20 +8,25 @@ import os if sys.version_info[:3] < (3, 4, 3): raise Exception("You need at least Python 3.4.3 to run ARTIQ") + class PushDocCommand(Command): description = "uploads the documentation to m-labs.hk" user_options = [] + def initialize_options(self): pass + def finalize_options(self): pass + def run(self): os.system("rsync -avz doc/manual/_build/html/ shell.serverraum.org:~/web/m-labs.hk/artiq/manual") + requirements = [ "sphinx", "sphinx-argparse", "pyserial", "numpy", "scipy", "python-dateutil", "prettytable", "h5py", "pydaqmx", "pyelftools", - "quamash", "pyqtgraph", "llvmlite_artiq", "pygit2" + "quamash", "pyqtgraph", "llvmlite_artiq", "pygit2", "aiohttp" ] scripts = [ @@ -30,6 +35,7 @@ scripts = [ "artiq_coretool=artiq.frontend.artiq_coretool:main", "artiq_ctlmgr=artiq.frontend.artiq_ctlmgr:main", "artiq_gui=artiq.frontend.artiq_gui:main", + "artiq_influxdb=artiq.frontend.artiq_influxdb:main", "artiq_master=artiq.frontend.artiq_master:main", "artiq_mkfs=artiq.frontend.artiq_mkfs:main", "artiq_rpctool=artiq.frontend.artiq_rpctool:main", From abbf5eb059ebd3c31fd5f28bca4cb0e6d65540c5 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Mon, 17 Aug 2015 15:44:54 +0800 Subject: [PATCH 094/191] gui: minor cleanup --- artiq/frontend/artiq_gui.py | 1 - 1 file changed, 1 deletion(-) diff --git a/artiq/frontend/artiq_gui.py b/artiq/frontend/artiq_gui.py index 39a0f1142..b33e19c43 100755 --- a/artiq/frontend/artiq_gui.py +++ b/artiq/frontend/artiq_gui.py @@ -48,7 +48,6 @@ class MainWindow(QtGui.QMainWindow): def __init__(self, app): QtGui.QMainWindow.__init__(self) self.setWindowIcon(QtGui.QIcon(os.path.join(data_dir, "icon.png"))) - #self.resize(1400, 800) self.setWindowTitle("ARTIQ") self.exit_request = asyncio.Event() From 324cafae3d75865f28e4978bf98fe6a3bd1632b7 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Mon, 17 Aug 2015 15:45:08 +0800 Subject: [PATCH 095/191] rpctool: use pprint --- artiq/frontend/artiq_rpctool.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/artiq/frontend/artiq_rpctool.py b/artiq/frontend/artiq_rpctool.py index a8a166891..acfa886d1 100755 --- a/artiq/frontend/artiq_rpctool.py +++ b/artiq/frontend/artiq_rpctool.py @@ -4,6 +4,7 @@ import argparse import textwrap import sys import numpy as np # Needed to use numpy in RPC call arguments on cmd line +import pprint from artiq.protocols.pc_rpc import Client @@ -77,7 +78,7 @@ def call_method(remote, method_name, args): method = getattr(remote, method_name) ret = method(*[eval(arg) for arg in args]) if ret is not None: - print("{}".format(ret)) + pprint.pprint(ret) def main(): From a64c6a321bb505bee5f411f4d8131de14ee4dcd5 Mon Sep 17 00:00:00 2001 From: Yann Sionneau Date: Mon, 17 Aug 2015 11:53:49 +0200 Subject: [PATCH 096/191] conda: add aiohttp dependency on artiq pkg + recipe for aiohttp pkg --- conda/aiohttp/bld.bat | 2 ++ conda/aiohttp/build.sh | 3 +++ conda/aiohttp/meta.yaml | 36 ++++++++++++++++++++++++++++++++++++ conda/artiq/meta.yaml | 1 + 4 files changed, 42 insertions(+) create mode 100644 conda/aiohttp/bld.bat create mode 100644 conda/aiohttp/build.sh create mode 100644 conda/aiohttp/meta.yaml diff --git a/conda/aiohttp/bld.bat b/conda/aiohttp/bld.bat new file mode 100644 index 000000000..c40a9bbef --- /dev/null +++ b/conda/aiohttp/bld.bat @@ -0,0 +1,2 @@ +"%PYTHON%" setup.py 
install +if errorlevel 1 exit 1 diff --git a/conda/aiohttp/build.sh b/conda/aiohttp/build.sh new file mode 100644 index 000000000..8e25a1455 --- /dev/null +++ b/conda/aiohttp/build.sh @@ -0,0 +1,3 @@ +#!/bin/bash + +$PYTHON setup.py install diff --git a/conda/aiohttp/meta.yaml b/conda/aiohttp/meta.yaml new file mode 100644 index 000000000..c251daaad --- /dev/null +++ b/conda/aiohttp/meta.yaml @@ -0,0 +1,36 @@ +package: + name: aiohttp + version: "0.17.2" + +source: + fn: aiohttp-0.17.2.tar.gz + url: https://pypi.python.org/packages/source/a/aiohttp/aiohttp-0.17.2.tar.gz + md5: 7640928fd4b5c1ccf1f8bcad276d39d6 + +build: + number: 0 + +requirements: + build: + - python + - setuptools + - chardet + + run: + - python + - chardet + +test: + # Python imports + imports: + - aiohttp + + requires: + - chardet + - gunicorn + - nose + +about: + home: https://github.com/KeepSafe/aiohttp/ + license: Apache Software License + summary: 'http client/server for asyncio' diff --git a/conda/artiq/meta.yaml b/conda/artiq/meta.yaml index 3de0d7250..45e0c194d 100644 --- a/conda/artiq/meta.yaml +++ b/conda/artiq/meta.yaml @@ -49,6 +49,7 @@ requirements: - pyqtgraph - flterm # [linux] - pygit2 + - aiohttp test: imports: From 2410ef79e5e33ebe276531991d3b078a4399775a Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Mon, 17 Aug 2015 23:03:18 +0800 Subject: [PATCH 097/191] gui: display numpy scalars in parameters --- artiq/gui/tools.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/artiq/gui/tools.py b/artiq/gui/tools.py index 7d673cc55..da94ae7e7 100644 --- a/artiq/gui/tools.py +++ b/artiq/gui/tools.py @@ -1,4 +1,5 @@ from quamash import QtCore +import numpy as np def elide(s, maxlen): @@ -21,11 +22,19 @@ def elide(s, maxlen): return s +_scalar_types = { + int, float, + np.int8, np.int16, np.int32, np.int64, + np.uint8, np.uint16, np.uint32, np.uint64, + np.float16, np.float32, np.float64 +} + + def short_format(v): if v is None: return "None" t = type(v) - if t is int or t is float: + if t in _scalar_types: return str(v) elif t is str: return "\"" + elide(v, 15) + "\"" From 21b170ed32d73c729fa5a570e8a1573dcccaa303 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Mon, 17 Aug 2015 23:03:43 +0800 Subject: [PATCH 098/191] pc_rpc/Server: show builtin terminate in method list --- artiq/protocols/pc_rpc.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/artiq/protocols/pc_rpc.py b/artiq/protocols/pc_rpc.py index d99c62803..f001d3a26 100644 --- a/artiq/protocols/pc_rpc.py +++ b/artiq/protocols/pc_rpc.py @@ -454,6 +454,17 @@ class Server(_AsyncioServer): argspec = inspect.getfullargspec(method) doc["methods"][name] = (dict(argspec.__dict__), inspect.getdoc(method)) + if self.builtin_terminate: + doc["methods"]["terminate"] = ( + { + "args": ["self"], + "defaults": None, + "varargs": None, + "varkw": None, + "kwonlyargs": [], + "kwonlydefaults": [], + }, + "Terminate the server.") obj = {"status": "ok", "ret": doc} elif obj["action"] == "call": logger.debug("calling %s", _PrettyPrintCall(obj)) From e7d495cf2c5d19896d069da299093a854243f134 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Mon, 17 Aug 2015 23:14:18 +0800 Subject: [PATCH 099/191] gui/tools: better detection of scalar types --- artiq/gui/tools.py | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/artiq/gui/tools.py b/artiq/gui/tools.py index da94ae7e7..04476b25d 100644 --- a/artiq/gui/tools.py +++ b/artiq/gui/tools.py @@ -22,19 +22,11 @@ def elide(s, maxlen): 
return s -_scalar_types = { - int, float, - np.int8, np.int16, np.int32, np.int64, - np.uint8, np.uint16, np.uint32, np.uint64, - np.float16, np.float32, np.float64 -} - - def short_format(v): if v is None: return "None" t = type(v) - if t in _scalar_types: + if np.issubdtype(t, int) or np.issubdtype(t, float): return str(v) elif t is str: return "\"" + elide(v, 15) + "\"" From 278adf193d2acb07c9721106d83fd3b8f238380c Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Mon, 17 Aug 2015 23:17:13 +0800 Subject: [PATCH 100/191] test/pc_rpc: use builtin_terminate --- artiq/test/pc_rpc.py | 18 ++++-------------- 1 file changed, 4 insertions(+), 14 deletions(-) diff --git a/artiq/test/pc_rpc.py b/artiq/test/pc_rpc.py index 1b60d245f..5bd0a64cd 100644 --- a/artiq/test/pc_rpc.py +++ b/artiq/test/pc_rpc.py @@ -45,7 +45,7 @@ class RPCCase(unittest.TestCase): self.assertEqual(test_object, test_object_back) with self.assertRaises(pc_rpc.RemoteError): remote.non_existing_method() - remote.quit() + remote.terminate() finally: remote.close_rpc() @@ -68,7 +68,7 @@ class RPCCase(unittest.TestCase): self.assertEqual(test_object, test_object_back) with self.assertRaises(pc_rpc.RemoteError): yield from remote.non_existing_method() - yield from remote.quit() + yield from remote.terminate() finally: remote.close_rpc() @@ -97,16 +97,6 @@ class FireAndForgetCase(unittest.TestCase): class Echo: - def __init__(self): - self.terminate_notify = asyncio.Semaphore(0) - - @asyncio.coroutine - def wait_quit(self): - yield from self.terminate_notify.acquire() - - def quit(self): - self.terminate_notify.release() - def echo(self, x): return x @@ -116,10 +106,10 @@ def run_server(): asyncio.set_event_loop(loop) try: echo = Echo() - server = pc_rpc.Server({"test": echo}) + server = pc_rpc.Server({"test": echo}, builtin_terminate=True) loop.run_until_complete(server.start(test_address, test_port)) try: - loop.run_until_complete(echo.wait_quit()) + loop.run_until_complete(server.wait_terminate()) finally: loop.run_until_complete(server.stop()) finally: From c71d207e90153f32277ddbf1d2ab321ff315ae5b Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Mon, 17 Aug 2015 23:41:21 +0800 Subject: [PATCH 101/191] core/break_realtime: only increase now --- artiq/coredevice/core.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/artiq/coredevice/core.py b/artiq/coredevice/core.py index 536a10054..79ba4d969 100644 --- a/artiq/coredevice/core.py +++ b/artiq/coredevice/core.py @@ -124,4 +124,6 @@ class Core: @kernel def break_realtime(self): - at_mu(syscall("rtio_get_counter") + 125000) + min_now = syscall("rtio_get_counter") + 125000 + if now_mu() < min_now: + at_mu(min_now) From cd199f16eccb4b01483a46dd66876df9d0786687 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Mon, 17 Aug 2015 23:41:55 +0800 Subject: [PATCH 102/191] doc: add note about setting core device IP --- doc/manual/getting_started.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/getting_started.rst b/doc/manual/getting_started.rst index 5a5fd1329..b2254173d 100644 --- a/doc/manual/getting_started.rst +++ b/doc/manual/getting_started.rst @@ -23,7 +23,7 @@ As a very first step, we will turn on a LED on the core device. Create a file `` The central part of our code is our ``LED`` class, that derives from :class:`artiq.language.environment.EnvExperiment`. 
Among other features, ``EnvExperiment`` calls our ``build`` method and provides the ``attr_device`` method that interfaces to the device database to create the appropriate device drivers and make those drivers accessible as ``self.core`` and ``self.led``. The ``@kernel`` decorator tells the system that the ``run`` method must be executed on the core device (instead of the host). The decorator uses ``self.core`` internally, which is why we request the core device using ``attr_device`` like any other. -Copy the files ``ddb.pyon`` and ``pdb.pyon`` (containing the device and parameter databases) from the ``examples`` folder of ARTIQ into the same directory as ``led.py`` (alternatively, you can use the ``-d`` and ``-p`` options of ``artiq_run.py``). You can open the database files using a text editor - their contents are in a human-readable format. +Copy the files ``ddb.pyon`` and ``pdb.pyon`` (containing the device and parameter databases) from the ``examples`` folder of ARTIQ into the same directory as ``led.py`` (alternatively, you can use the ``-d`` and ``-p`` options of ``artiq_run.py``). You can open the database files using a text editor - their contents are in a human-readable format. You will probably want to set the IP address of the core device in ``ddb.pyon`` so that the computer can connect to it (it is the ``host`` parameter of the ``comm`` entry). Run your code using ``artiq_run``, which is part of the ARTIQ front-end tools: :: From c625f2e7c9bdc9173b163241ae48ec96407adb73 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Mon, 17 Aug 2015 23:50:24 +0800 Subject: [PATCH 103/191] ttl: minor docstring cleanup --- artiq/coredevice/ttl.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/artiq/coredevice/ttl.py b/artiq/coredevice/ttl.py index b655a256d..d54710e66 100644 --- a/artiq/coredevice/ttl.py +++ b/artiq/coredevice/ttl.py @@ -6,7 +6,6 @@ class TTLOut: This should be used with output-only channels. - :param core: core device :param channel: channel number """ def __init__(self, dmgr, channel): @@ -70,7 +69,6 @@ class TTLInOut: This should be used with bidirectional channels. - :param core: core device :param channel: channel number """ def __init__(self, dmgr, channel): @@ -216,7 +214,6 @@ class TTLClockGen: This should be used with TTL channels that have a clock generator built into the gateware (not compatible with regular TTL channels). - :param core: core device :param channel: channel number """ def __init__(self, dmgr, channel): From a0f1b02eb511bbd53c830ee7eef2faa681c5ce5b Mon Sep 17 00:00:00 2001 From: Yann Sionneau Date: Mon, 17 Aug 2015 17:50:19 +0200 Subject: [PATCH 104/191] Revert "manual: llvmlite-artiq conda package needs a recent libstdc++6" This reverts commit 8669f1d61db7cacc676db464ffbcd54b76e3e8e0. This is no longer needed since the llvmlite-artiq package for linux-64 has been rebuilt on an older Ubuntu (14.04 LTS) --- doc/manual/installing.rst | 19 ------------------- 1 file changed, 19 deletions(-) diff --git a/doc/manual/installing.rst b/doc/manual/installing.rst index b95dadb42..9d7523205 100644 --- a/doc/manual/installing.rst +++ b/doc/manual/installing.rst @@ -42,25 +42,6 @@ Then you can install the ARTIQ package, it will pull all the necessary dependenc $ conda install artiq -.. note:: - You need to have a recent libstdc++6. - If you are installing on e.g. Ubuntu 14.04 LTS: you need to update your - libstdc++6. 
- - If the following command outputs `GLIBCXX_3.4.20`, then you don't need to - upgrade:: - - $ strings /usr/lib/x86_64-linux-gnu/libstdc++.so.6 | grep GLIBCXX_3.4.20 - GLIBCXX_3.4.20 - - Adapt the previous command to the location of libstdc++ on your system. - - To upgrade, on Ubuntu you need to do the following:: - - $ sudo add-apt-repository ppa:ubuntu-toolchain-r/test - $ sudo apt-get update - $ sudo apt-get install libstdc++6 - Preparing the core device FPGA board ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ From c94dafdecab1c5416ae8a23d50a2386842631c8e Mon Sep 17 00:00:00 2001 From: Yann Sionneau Date: Mon, 17 Aug 2015 17:51:36 +0200 Subject: [PATCH 105/191] Revert "travis: use more recent libstdc++" This reverts commit 075bf331ac440dcab49ee73a522acfe2230a0104. This is no longer needed since the llvmlite-artiq package for linux-64 has been rebuilt on an older Ubuntu (14.04 LTS) --- .travis.yml | 6 ------ 1 file changed, 6 deletions(-) diff --git a/.travis.yml b/.travis.yml index 0892bc27c..7f3498cb7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,12 +2,6 @@ language: python python: - '3.4' sudo: false -addons: - apt: - sources: - - ubuntu-toolchain-r-test - packages: - - libstdc++6 env: global: - BUILD_SOC=1 From 19a77b51f2219965d7c0eda51ca344edf47f08d3 Mon Sep 17 00:00:00 2001 From: Yann Sionneau Date: Mon, 17 Aug 2015 22:02:01 +0200 Subject: [PATCH 106/191] conda: make aiohttp testing not depend on gunicorn on Windows platform --- conda/aiohttp/meta.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/conda/aiohttp/meta.yaml b/conda/aiohttp/meta.yaml index c251daaad..2b196ffc1 100644 --- a/conda/aiohttp/meta.yaml +++ b/conda/aiohttp/meta.yaml @@ -27,7 +27,7 @@ test: requires: - chardet - - gunicorn + - gunicorn # [not win] - nose about: From 66b5ca99d801656aba0970e7999f65628e7d7e40 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Tue, 18 Aug 2015 09:59:48 +0800 Subject: [PATCH 107/191] influxdb: better filtering --- artiq/frontend/artiq_influxdb.py | 60 +++++++++++++++++++++----------- 1 file changed, 40 insertions(+), 20 deletions(-) diff --git a/artiq/frontend/artiq_influxdb.py b/artiq/frontend/artiq_influxdb.py index af93070c4..8c57b595e 100755 --- a/artiq/frontend/artiq_influxdb.py +++ b/artiq/frontend/artiq_influxdb.py @@ -52,8 +52,8 @@ def get_argparser(): "--bind-port", default=3248, type=int, help="TCP port to listen to for control (default: %(default)d)") group.add_argument( - "--filter-file", default="influxdb_filter.pyon", - help="file to save the filter in (default: %(default)s)") + "--pattern-file", default="influxdb_patterns.pyon", + help="file to save the patterns in (default: %(default)s)") verbosity_args(parser) return parser @@ -143,37 +143,57 @@ class MasterReader(TaskObject): class Filter: - def __init__(self, filter_file): - self.filter_file = filter_file - self.filter = [] + def __init__(self, pattern_file): + self.pattern_file = pattern_file + self.patterns = [] try: - self.filter = pyon.load_file(self.filter_file) + self.patterns = pyon.load_file(self.pattern_file) except FileNotFoundError: - logger.info("no filter file found, using empty filter") + logger.info("no pattern file found, logging everything") def _save(self): - pyon.store_file(self.filter_file, self.filter) + pyon.store_file(self.pattern_file, self.patterns) + # Privatize so that it is not shown in artiq_rpctool list-methods. 
def _filter(self, k): - for pattern in self.filter: + take = "+" + for pattern in self.patterns: + sign = "-" + if pattern[0] in "+-": + sign, pattern = pattern[0], pattern[1:] if fnmatch.fnmatchcase(k, pattern): - return False - return True + take = sign + return take == "+" - def add_pattern(self, pattern): - """Add a name pattern to ignore.""" - if pattern not in self.filter: - self.filter.append(pattern) + def add_pattern(self, pattern, index=None): + """Add a pattern. + + Optional + and - pattern prefixes specify whether to ignore or log + keys matching the rest of the pattern. + Default (in the absence of prefix) is to ignore. Keys that match no + pattern are logged. Last matched pattern takes precedence. + + The optional index parameter specifies where to insert the pattern. + By default, patterns are added at the end. If index is an integer, it + specifies the index where the pattern is inserted. If it is a string, + that string must match an existing pattern and the new pattern is + inserted immediately after it.""" + if pattern not in self.patterns: + if index is None: + index = len(self.patterns) + if isinstance(index, str): + index = self.patterns.index(index) + 1 + self.patterns.insert(index, pattern) self._save() def remove_pattern(self, pattern): - """Remove a pattern name to ignore.""" - self.pattern.remove(pattern) + """Remove a pattern.""" + self.patterns.remove(pattern) self._save() def get_patterns(self): - """Show ignore patterns.""" - return self.filter + """Show existing patterns.""" + return self.patterns def main(): @@ -189,7 +209,7 @@ def main(): writer.start() atexit.register(lambda: loop.run_until_complete(writer.stop())) - filter = Filter(args.filter_file) + filter = Filter(args.pattern_file) rpc_server = Server({"influxdb_filter": filter}, builtin_terminate=True) loop.run_until_complete(rpc_server.start(args.bind, args.bind_port)) atexit.register(lambda: loop.run_until_complete(rpc_server.stop())) From 2275017651d621c69b32c1bac3f95d779e6bc01e Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Tue, 18 Aug 2015 13:34:15 +0800 Subject: [PATCH 108/191] influxdb: better error reporting --- artiq/frontend/artiq_influxdb.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/artiq/frontend/artiq_influxdb.py b/artiq/frontend/artiq_influxdb.py index 8c57b595e..f719afc8f 100755 --- a/artiq/frontend/artiq_influxdb.py +++ b/artiq/frontend/artiq_influxdb.py @@ -91,8 +91,12 @@ class DBWriter(TaskObject): k, exc_info=True) else: if response.status not in (200, 204): - logger.warning("got HTTP status %d trying to update '%s'", - response.status, k) + content = (yield from response.content.read()).decode() + if content: + content = content[:-1] # drop \n + logger.warning("got HTTP status %d " + "trying to update '%s': %s", + response.status, k, content) response.close() From 2ac8c53bbe25dfb7223a1022fc11c6b46ccdf797 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Tue, 18 Aug 2015 13:34:42 +0800 Subject: [PATCH 109/191] influxdb: use types --- artiq/frontend/artiq_influxdb.py | 27 ++++++++++++++++++++------- 1 file changed, 20 insertions(+), 7 deletions(-) diff --git a/artiq/frontend/artiq_influxdb.py b/artiq/frontend/artiq_influxdb.py index f719afc8f..42ef923c2 100755 --- a/artiq/frontend/artiq_influxdb.py +++ b/artiq/frontend/artiq_influxdb.py @@ -7,6 +7,7 @@ import atexit import fnmatch from functools import partial +import numpy as np import aiohttp from artiq.tools import verbosity_args, init_logger @@ -100,19 +101,31 @@ class 
DBWriter(TaskObject): response.close() +def format_influxdb(v): + if isinstance(v, bool): + if v: + return "t" + else: + return "f" + elif np.issubdtype(type(v), int): + return "{}i".format(v) + elif np.issubdtype(type(v), float): + return "{}".format(v) + elif isinstance(v, str): + return '"' + v.replace('"', '\\"') + '"' + else: + return None + + class Parameters: def __init__(self, filter_function, writer, init): self.filter_function = filter_function self.writer = writer def __setitem__(self, k, v): - try: - v = float(v) - except: - pass - else: - if self.filter_function(k): - self.writer.update(k, v) + v_db = format_influxdb(v) + if v_db is not None and self.filter_function(k): + self.writer.update(k, v_db) class MasterReader(TaskObject): From ed00ca148570194d5ffa2fae6314c221172ca493 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Tue, 18 Aug 2015 13:35:05 +0800 Subject: [PATCH 110/191] influxdb: do not crash on parameter deletion --- artiq/frontend/artiq_influxdb.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/artiq/frontend/artiq_influxdb.py b/artiq/frontend/artiq_influxdb.py index 42ef923c2..72ec1b9f0 100755 --- a/artiq/frontend/artiq_influxdb.py +++ b/artiq/frontend/artiq_influxdb.py @@ -127,6 +127,9 @@ class Parameters: if v_db is not None and self.filter_function(k): self.writer.update(k, v_db) + def __delitem__(self, k): + pass + class MasterReader(TaskObject): def __init__(self, server, port, retry, filter_function, writer): From 600e8335f2b48e813245575898ab232cce0cffa3 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Tue, 18 Aug 2015 14:49:23 +0800 Subject: [PATCH 111/191] influxdb: tag-based schema, better type support --- artiq/frontend/artiq_influxdb.py | 44 +++++++++++++++++--------------- 1 file changed, 24 insertions(+), 20 deletions(-) diff --git a/artiq/frontend/artiq_influxdb.py b/artiq/frontend/artiq_influxdb.py index 72ec1b9f0..299695030 100755 --- a/artiq/frontend/artiq_influxdb.py +++ b/artiq/frontend/artiq_influxdb.py @@ -59,6 +59,26 @@ def get_argparser(): return parser +def influxdb_str(s): + return '"' + s.replace('"', '\\"') + '"' + + +def format_influxdb(v): + if isinstance(v, bool): + if v: + return "bool", "t" + else: + return "bool", "f" + elif np.issubdtype(type(v), int): + return "int", "{}i".format(v) + elif np.issubdtype(type(v), float): + return "float", "{}".format(v) + elif isinstance(v, str): + return "str", influxdb_str(v) + else: + return "pyon", influxdb_str(pyon.encode(v)) + + class DBWriter(TaskObject): def __init__(self, base_url, user, password, database, table): self.base_url = base_url @@ -83,7 +103,8 @@ class DBWriter(TaskObject): url = self.base_url + "/write" params = {"u": self.user, "p": self.password, "db": self.database, "consistency": "any", "precision": "n"} - data = "{} {}={}".format(self.table, k, v) + fmt_ty, fmt_v = format_influxdb(v) + data = "{},parameter={} {}={}".format(self.table, k, fmt_ty, fmt_v) try: response = yield from aiohttp.request( "POST", url, params=params, data=data) @@ -101,31 +122,14 @@ class DBWriter(TaskObject): response.close() -def format_influxdb(v): - if isinstance(v, bool): - if v: - return "t" - else: - return "f" - elif np.issubdtype(type(v), int): - return "{}i".format(v) - elif np.issubdtype(type(v), float): - return "{}".format(v) - elif isinstance(v, str): - return '"' + v.replace('"', '\\"') + '"' - else: - return None - - class Parameters: def __init__(self, filter_function, writer, init): self.filter_function = filter_function self.writer = writer def 
__setitem__(self, k, v): - v_db = format_influxdb(v) - if v_db is not None and self.filter_function(k): - self.writer.update(k, v_db) + if self.filter_function(k): + self.writer.update(k, v) def __delitem__(self, k): pass From 2c15bd3e44086de30e5a5f5fe1db9780fc1c12c2 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Tue, 18 Aug 2015 15:20:42 +0800 Subject: [PATCH 112/191] kc705: add TTL channel on SMA GPIO N --- examples/master/ddb.pyon | 8 +++++++- soc/targets/artiq_kc705.py | 6 ++++++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/examples/master/ddb.pyon b/examples/master/ddb.pyon index df172f272..00e823a28 100644 --- a/examples/master/ddb.pyon +++ b/examples/master/ddb.pyon @@ -49,11 +49,17 @@ "class": "TTLOut", "arguments": {"channel": 5} }, + "ttl_sma": { + "type": "local", + "module": "artiq.coredevice.ttl", + "class": "TTLInOut", + "arguments": {"channel": 17} + }, "led": { "type": "local", "module": "artiq.coredevice.ttl", "class": "TTLOut", - "arguments": {"channel": 17} + "arguments": {"channel": 18} }, "dds_bus": { diff --git a/soc/targets/artiq_kc705.py b/soc/targets/artiq_kc705.py index a768e2f07..1ffc83aea 100644 --- a/soc/targets/artiq_kc705.py +++ b/soc/targets/artiq_kc705.py @@ -148,6 +148,9 @@ class NIST_QC1(_NIST_QCx): self.submodules += phy rtio_channels.append(rtio.Channel.from_phy(phy)) + phy = ttl_simple.Inout(platform.request("user_sma_gpio_n")) + self.submodules += phy + rtio_channels.append(rtio.Channel.from_phy(phy)) phy = ttl_simple.Output(platform.request("user_led", 2)) self.submodules += phy rtio_channels.append(rtio.Channel.from_phy(phy)) @@ -187,6 +190,9 @@ class NIST_QC2(_NIST_QCx): self.submodules += phy rtio_channels.append(rtio.Channel.from_phy(phy)) + phy = ttl_simple.Inout(platform.request("user_sma_gpio_n")) + self.submodules += phy + rtio_channels.append(rtio.Channel.from_phy(phy)) phy = ttl_simple.Output(platform.request("user_led", 2)) self.submodules += phy rtio_channels.append(rtio.Channel.from_phy(phy)) From 178816243c08b30eff4bc5f59a0895b38f4b78ae Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Tue, 18 Aug 2015 16:04:27 +0800 Subject: [PATCH 113/191] doc/core_device: update, add KC705 QC1 TTL table --- doc/manual/core_device.rst | 30 +++++++++++++++++++++++------- 1 file changed, 23 insertions(+), 7 deletions(-) diff --git a/doc/manual/core_device.rst b/doc/manual/core_device.rst index fa8c8db99..6b0120ec8 100644 --- a/doc/manual/core_device.rst +++ b/doc/manual/core_device.rst @@ -26,7 +26,25 @@ FPGA board ports KC705 ----- -The main target board for the ARTIQ core device is the KC705 development board from Xilinx. +The main target board for the ARTIQ core device is the KC705 development board from Xilinx. It supports the NIST QC1 hardware via an adapter, and the NIST QC2 hardware (FMC). 
+ +With the QC1 hardware, the TTL lines are mapped as follows: + ++--------------+------------+--------------+ +| RTIO channel | TTL line | Capability | ++==============+============+==============+ +| 0 | PMT0 | Input | ++--------------+------------+--------------+ +| 1 | PMT1 | Input | ++--------------+------------+--------------+ +| 2-16 | TTL0-14 | Output | ++--------------+------------+--------------+ +| 17 | SMA_GPIO_N | Input+Output | ++--------------+------------+--------------+ +| 18 | LED | Output | ++--------------+------------+--------------+ +| 19 | TTL15 | Clock | ++--------------+------------+--------------+ Pipistrello ----------- @@ -44,17 +62,15 @@ When plugged to an adapter, the NIST QC1 hardware can be used. The TTL lines are +--------------+----------+------------+ | 2-16 | TTL0-14 | Output | +--------------+----------+------------+ -| 17 | TTL15 | Clock | +| 17 | EXT_LED | Output | +--------------+----------+------------+ -| 18 | EXT_LED | Output | +| 18 | USER_LED | Output | +--------------+----------+------------+ -| 19 | USER_LED | Output | -+--------------+----------+------------+ -| 20 | DDS | Output | +| 19 | TTL15 | Clock | +--------------+----------+------------+ The input only limitation on channels 0 and 1 comes from the QC-DAQ adapter. When the adapter is not used (and physically unplugged from the Pipistrello board), the corresponding pins on the Pipistrello can be used as outputs. Do not configure these channels as outputs when the adapter is plugged, as this would cause electrical contention. The board can accept an external RTIO clock connected to PMT2. If the DDS box -does not drive the PMT2 pair, use XTRIG and patch the XTRIG transciever output +does not drive the PMT2 pair, use XTRIG and patch the XTRIG transceiver output on the adapter board onto C:15 disconnecting PMT2. From c97c6e2b9a8a7ea654736e2a9c3f403787613602 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Tue, 18 Aug 2015 22:34:22 +0800 Subject: [PATCH 114/191] doc/ndsp_reference: reorganize --- doc/manual/ndsp_reference.rst | 74 +++++++++++++++++------------------ 1 file changed, 35 insertions(+), 39 deletions(-) diff --git a/doc/manual/ndsp_reference.rst b/doc/manual/ndsp_reference.rst index fa2f85e31..5dcf96ae0 100644 --- a/doc/manual/ndsp_reference.rst +++ b/doc/manual/ndsp_reference.rst @@ -34,6 +34,15 @@ Client Lab Brick Digital Attenuator (LDA) ---------------------------------- +Driver +++++++ + +.. automodule:: artiq.devices.lda.driver + :members: + +Controller +++++++++++ + On Linux, you need to give your user access to the USB device. You can do that by creating a file under ``/etc/udev/rules.d/`` named @@ -56,15 +65,6 @@ Also, the ``SN:`` prefix is mandatory. You can choose the LDA model with the ``-P`` parameter. The default is LDA-102. -Driver -++++++ - -.. automodule:: artiq.devices.lda.driver - :members: - -Controller -++++++++++ - .. argparse:: :ref: artiq.frontend.lda_controller.get_argparser :prog: lda_controller @@ -88,6 +88,25 @@ Controller Thorlabs T-Cube --------------- +TDC001 Driver ++++++++++++++ + +.. autoclass:: artiq.devices.thorlabs_tcube.driver.Tdc + :members: + +TPZ001 Driver ++++++++++++++ + +.. autoclass:: artiq.devices.thorlabs_tcube.driver.Tpz + :members: + +Controller +++++++++++ + +.. argparse:: + :ref: artiq.frontend.thorlabs_tcube_controller.get_argparser + :prog: thorlabs_controller + .. 
_tdc001-controller-usage-example: TDC001 controller usage example @@ -149,28 +168,21 @@ Then, send commands to it via the ``artiq_rpctool`` utility:: $ artiq_rpctool ::1 3255 call set_output_volts 150 # set output voltage to 150 V $ artiq_rpctool ::1 3255 call close # close the device -TDC001 Driver -+++++++++++++ +NI PXI6733 +---------- -.. autoclass:: artiq.devices.thorlabs_tcube.driver.Tdc - :members: +Driver +++++++ -TPZ001 Driver -+++++++++++++ - -.. autoclass:: artiq.devices.thorlabs_tcube.driver.Tpz +.. automodule:: artiq.devices.pxi6733.driver :members: Controller ++++++++++ .. argparse:: - :ref: artiq.frontend.thorlabs_tcube_controller.get_argparser - :prog: thorlabs_controller - - -NI PXI6733 ----------- + :ref: artiq.frontend.pxi6733_controller.get_argparser + :prog: pxi6733_controller PXI6733 controller usage example ++++++++++++++++++++++++++++++++ @@ -201,19 +213,3 @@ Then, send a load_sample_values command to it via the ``artiq_rpctool`` utility: This loads 4 voltage values as a numpy float array: 1.0 V, 2.0 V, 3.0 V, 4.0 V Then the device is set up to output those samples at each rising edge of the clock. - -Driver -++++++ - -.. automodule:: artiq.devices.pxi6733.driver - :members: - -Controller -++++++++++ - -Usage example - -.. argparse:: - :ref: artiq.frontend.pxi6733_controller.get_argparser - :prog: pxi6733_controller - From 9f0ada49e56d9e9e2db310a39ad67ecbba9248af Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Tue, 18 Aug 2015 23:29:40 +0800 Subject: [PATCH 115/191] doc: environment --- artiq/language/environment.py | 4 +++ doc/manual/environment.rst | 51 ++++++++++++++++++++++++++++++++++ doc/manual/getting_started.rst | 2 +- doc/manual/index.rst | 1 + 4 files changed, 57 insertions(+), 1 deletion(-) create mode 100644 doc/manual/environment.rst diff --git a/artiq/language/environment.py b/artiq/language/environment.py index 1132e7f72..534197800 100644 --- a/artiq/language/environment.py +++ b/artiq/language/environment.py @@ -285,6 +285,10 @@ class Experiment: class EnvExperiment(Experiment, HasEnvironment): + """Base class for experiments that use the ``HasEnvironment`` environment + manager. + + Most experiment should derive from this class.""" pass diff --git a/doc/manual/environment.rst b/doc/manual/environment.rst new file mode 100644 index 000000000..8fbcda495 --- /dev/null +++ b/doc/manual/environment.rst @@ -0,0 +1,51 @@ +The environment +=============== + +Experiments interact with an environment that consists of devices, parameters, arguments and results. Access to the environment is handled by the class :class:`artiq.language.environment.EnvExperiment` that experiments should derive from. + +.. _ddb: + +The device database +------------------- + +The device database contains information about the devices available in a ARTIQ installation, what drivers to use, what controllers to use and on what machine, and where the devices are connected. + +The master (or ``artiq_run``) instantiates the device drivers (and the RPC clients in the case of controllers) for the experiments based on the contents of the device database. + +The device database is stored in the memory of the master and is backed by a PYON file typically called ``ddb.pyon``. + +The device database is a Python dictionary whose keys are the device names, and values can have several types. + +Local devices ++++++++++++++ + +Local device entries are dictionaries that contain a ``type`` field set to ``local``. 
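For concreteness, the ``led`` entry of ``examples/master/ddb.pyon``, as updated earlier in this series for the KC705 NIST QC1 target, is a typical local device entry::

    "led": {
        "type": "local",
        "module": "artiq.coredevice.ttl",
        "class": "TTLOut",
        "arguments": {"channel": 18}
    }
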
They correspond to device drivers that are created locally on the master (as opposed to going through the controller mechanism). The fields ``module`` and ``class`` determine the location of the Python class that the driver consists of. The ``arguments`` field is another (possibly empty) dictionary that contains arguments to pass to the device driver constructor. + +Controllers ++++++++++++ + +Controller entries are dictionaries whose ``type`` field is set to ``controller``. When an experiment requests such a device, a RPC client (see :class:`artiq.protocols.pc_rpc`) is created and connected to the appropriate controller. Controller entries are also used by controller managers to determine what controllers to run. + +The ``best_effort`` field is a boolean that determines whether to use :class:`artiq.protocols.pc_rpc.Client` or :class:`artiq.protocols.pc_rpc.BestEffortClient`. The ``host`` and ``port`` fields configure the TCP connection. The ``target`` field contains the name of the RPC target to use (you may use ``artiq_rpctool`` on a controller to list its targets). Controller managers run the ``command`` field in a shell to launch the controller, after replacing ``{port}`` and ``{bind}`` by respectively the TCP port the controller should listen to (matches the ``port`` field) and an appropriate bind address for the controller's listening socket. + +Aliases ++++++++ + +If an entry is a string, that string is used as a key for another lookup in the device database. + +The parameter database +---------------------- + +The parameter database is a key-value store that is global to all experiments. It is stored in the memory of the master and is backed by a PYON file typically called ``pdb.pyon``. It may be used to communicate values across experiments; for example, a periodic calibration experiment may update a parameter read by payload experiments. + +Arguments +--------- + +Arguments are values that parameterize the behavior of an experiment and are set before the experiment is executed. + +Requesting the values of arguments can only be done in the build phase of an experiment. The value requests are also used to define the GUI widgets shown in the explorer when the experiment is selected. + +Results +------- + +Results are the output of an experiment. They are archived after in the HDF5 format after the experiment is run. Experiments may define real-time results that are (additionally) distributed to all clients connected to the master; for example, the ARTIQ GUI may plot them while the experiment is in progress to give rapid feedback to the user. Real-time results are a global key-value store (similar to the parameter database); experiments should use distinctive real-time result names in order to avoid conflicts. diff --git a/doc/manual/getting_started.rst b/doc/manual/getting_started.rst index b2254173d..fd14b9956 100644 --- a/doc/manual/getting_started.rst +++ b/doc/manual/getting_started.rst @@ -23,7 +23,7 @@ As a very first step, we will turn on a LED on the core device. Create a file `` The central part of our code is our ``LED`` class, that derives from :class:`artiq.language.environment.EnvExperiment`. Among other features, ``EnvExperiment`` calls our ``build`` method and provides the ``attr_device`` method that interfaces to the device database to create the appropriate device drivers and make those drivers accessible as ``self.core`` and ``self.led``. The ``@kernel`` decorator tells the system that the ``run`` method must be executed on the core device (instead of the host). 
The decorator uses ``self.core`` internally, which is why we request the core device using ``attr_device`` like any other. -Copy the files ``ddb.pyon`` and ``pdb.pyon`` (containing the device and parameter databases) from the ``examples`` folder of ARTIQ into the same directory as ``led.py`` (alternatively, you can use the ``-d`` and ``-p`` options of ``artiq_run.py``). You can open the database files using a text editor - their contents are in a human-readable format. You will probably want to set the IP address of the core device in ``ddb.pyon`` so that the computer can connect to it (it is the ``host`` parameter of the ``comm`` entry). +Copy the files ``ddb.pyon`` and ``pdb.pyon`` (containing the device and parameter databases) from the ``examples`` folder of ARTIQ into the same directory as ``led.py`` (alternatively, you can use the ``-d`` and ``-p`` options of ``artiq_run.py``). You can open the database files using a text editor - their contents are in a human-readable format. You will probably want to set the IP address of the core device in ``ddb.pyon`` so that the computer can connect to it (it is the ``host`` parameter of the ``comm`` entry). See :ref:`ddb` for more information. Run your code using ``artiq_run``, which is part of the ARTIQ front-end tools: :: diff --git a/doc/manual/index.rst b/doc/manual/index.rst index 1f6915468..af51eef69 100644 --- a/doc/manual/index.rst +++ b/doc/manual/index.rst @@ -10,6 +10,7 @@ Contents: installing getting_started management_system + environment core_device core_language_reference core_drivers_reference From c3f3763af17329f756b5f5e0c777f9cadfe0cc2c Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Tue, 18 Aug 2015 23:31:06 +0800 Subject: [PATCH 116/191] doc: units --- doc/manual/core_language_reference.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/manual/core_language_reference.rst b/doc/manual/core_language_reference.rst index 9f0a21d06..f4efa4335 100644 --- a/doc/manual/core_language_reference.rst +++ b/doc/manual/core_language_reference.rst @@ -18,5 +18,5 @@ The most commonly used features from those modules can be imported with ``from a :mod:`artiq.language.units` module ---------------------------------- -.. automodule:: artiq.language.units - :members: +This module contains floating point constants that correspond to common physical units (ns, MHz, ...). +They are provided for convenience (e.g write ``MHz`` instead of ``1000000.0``) and code clarity purposes. From da1398b183052122d7d4420bef59b6cb8d559858 Mon Sep 17 00:00:00 2001 From: Yann Sionneau Date: Wed, 19 Aug 2015 12:49:30 +0200 Subject: [PATCH 117/191] pxi6733: fix crash when samples are all the same When samples are all the same, min = max, which PyDAQmx does not like. 
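The underlying constraint is that ``CreateAOVoltageChan`` requires the channel maximum to be strictly greater than the minimum. A minimal standalone illustration of that guard, using hypothetical sample data (the patch below simply widens the range it passes to the driver):

    import numpy as np

    samples = np.array([0.0, 0.0])   # hypothetical data: all values equal
    lo, hi = float(samples.min()), float(samples.max())
    if hi <= lo:
        # the AO channel limits must satisfy max > min,
        # so widen the range before configuring the channel
        hi = lo + 1.0
    print(lo, hi)                    # prints: 0.0 1.0
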
This avoids the following crash reported by Kathie: C:\Users\rabi\artiq\artiq\frontend [master]> python .\artiq_rpctool.py ::1 3256 call load_sample_values 100000 'np.array([0.0,0.0],dtype=float)' Traceback (most recent call last): File ".\artiq_rpctool.py", line 112, in main() File ".\artiq_rpctool.py", line 107, in main call_method(remote, args.method, args.args) File ".\artiq_rpctool.py", line 79, in call_method ret = method(*[eval(arg) for arg in args]) File "C:\Users\rabi\AppData\Local\Continuum\Anaconda3\lib\site-packages\artiq- 0.0+dev-py3.4.egg\artiq\protocols\pc_rpc.py", line 142, in proxy return self.__do_rpc(name, args, kwargs) File "C:\Users\rabi\AppData\Local\Continuum\Anaconda3\lib\site-packages\artiq- 0.0+dev-py3.4.egg\artiq\protocols\pc_rpc.py", line 134, in __do_rpc return self.__do_action(obj) File "C:\Users\rabi\AppData\Local\Continuum\Anaconda3\lib\site-packages\artiq- 0.0+dev-py3.4.egg\artiq\protocols\pc_rpc.py", line 128, in __do_action raise RemoteError(obj["message"]) artiq.protocols.pc_rpc.RemoteError: Traceback (most recent call last): File "C:\Users\rabi\AppData\Local\Continuum\Anaconda3\lib\site-packages\artiq- 0.0+dev-py3.4.egg\artiq\protocols\pc_rpc.py", line 476, in _handle_connection_cr ret = method(*obj["args"], **obj["kwargs"]) File "C:\Users\rabi\AppData\Local\Continuum\Anaconda3\lib\site-packages\artiq- 0.0+dev-py3.4.egg\artiq\devices\pxi6733\driver.py", line 117, in load_sample_val ues byref(num_samps_written), None) File "", line 3, in WriteAnalogF64 File "", line 2, in function File "C:\Users\rabi\AppData\Local\Continuum\Anaconda3\lib\site-packages\pydaqm x-1.3.1-py3.4.egg\PyDAQmx\DAQmxFunctions.py", line 28, in mafunction raise DAQError(error,errBuff.value.decode("utf-8"), f.__name__) PyDAQmx.DAQmxFunctions.DAQError: Minimum is greater than or equal to the maximum . Ensure the maximum value is greater than the minimum value. If using a custom scale, ensure that the scaled maximum is greater than the scaled minimum. 
Property: DAQmx_AO_Min Corresponding Value: 0.0 Property: DAQmx_AO_Max Corresponding Value: 0.0 Channel Name: Dev1/ao1 Task Name: _unnamedTask<4> Status Code: -200082 in function DAQmxWriteAnalogF64 --- artiq/devices/pxi6733/driver.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/artiq/devices/pxi6733/driver.py b/artiq/devices/pxi6733/driver.py index 2d139dc85..153098b5d 100644 --- a/artiq/devices/pxi6733/driver.py +++ b/artiq/devices/pxi6733/driver.py @@ -93,7 +93,7 @@ class DAQmx: values = values.flatten() t = self.daq.Task() t.CreateAOVoltageChan(self.channels, b"", - min(values), max(values), + min(values), max(values)+1, self.daq.DAQmx_Val_Volts, None) channel_number = (c_ulong*1)() From 3002b5be56626d660b4f362862a48561442481bf Mon Sep 17 00:00:00 2001 From: Yann Sionneau Date: Wed, 19 Aug 2015 13:20:25 +0200 Subject: [PATCH 118/191] conda: update artiq pkg entry points --- conda/artiq/meta.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/conda/artiq/meta.yaml b/conda/artiq/meta.yaml index 45e0c194d..192193bce 100644 --- a/conda/artiq/meta.yaml +++ b/conda/artiq/meta.yaml @@ -11,9 +11,10 @@ build: entry_points: - artiq_client = artiq.frontend.artiq_client:main - artiq_compile = artiq.frontend.artiq_compile:main - - artiq_coreconfig = artiq.frontend.artiq_coreconfig:main + - artiq_coretool = artiq.frontend.artiq_coretool:main - artiq_ctlmgr = artiq.frontend.artiq_ctlmgr:main - artiq_gui = artiq.frontend.artiq_gui:main + - artiq_influxdb = artiq.frontend.artiq_influxdb:main - artiq_master = artiq.frontend.artiq_master:main - artiq_mkfs = artiq.frontend.artiq_mkfs:main - artiq_rpctool = artiq.frontend.artiq_rpctool:main From e6cc0419e5957feb183623f67546a68d90dffc6a Mon Sep 17 00:00:00 2001 From: Yann Sionneau Date: Thu, 20 Aug 2015 12:33:55 +0200 Subject: [PATCH 119/191] conda: give 10 more minutes to ISE to achieve routing --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 7f3498cb7..e57e5491b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -16,7 +16,7 @@ before_install: - conda install -q pip coverage anaconda-client migen cython - pip install coveralls install: - - conda build conda/artiq + - travis_wait conda build conda/artiq - conda install -q artiq --use-local script: - coverage run --source=artiq setup.py test From c6bba4d5eb599b16e446aafe978733665b442e14 Mon Sep 17 00:00:00 2001 From: Yann Sionneau Date: Thu, 20 Aug 2015 13:03:09 +0200 Subject: [PATCH 120/191] Revert "conda: give 10 more minutes to ISE to achieve routing" This reverts commit e6cc0419e5957feb183623f67546a68d90dffc6a. 
--- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index e57e5491b..7f3498cb7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -16,7 +16,7 @@ before_install: - conda install -q pip coverage anaconda-client migen cython - pip install coveralls install: - - travis_wait conda build conda/artiq + - conda build conda/artiq - conda install -q artiq --use-local script: - coverage run --source=artiq setup.py test From df7489ff61b9a450e4df8a2aed2187aafae75c79 Mon Sep 17 00:00:00 2001 From: Yann Sionneau Date: Thu, 20 Aug 2015 16:20:08 +0200 Subject: [PATCH 121/191] travis/conda: add automated builds for kc705 nist_qc2 subtarget --- artiq/frontend/artiq_flash.sh | 20 ++++++++++++++++++-- conda/artiq/build.sh | 17 ++++++++++++++--- 2 files changed, 32 insertions(+), 5 deletions(-) diff --git a/artiq/frontend/artiq_flash.sh b/artiq/frontend/artiq_flash.sh index 2ac6163ef..cbe4cca46 100755 --- a/artiq/frontend/artiq_flash.sh +++ b/artiq/frontend/artiq_flash.sh @@ -9,6 +9,8 @@ ARTIQ_PREFIX=$(python3 -c "import artiq; print(artiq.__path__[0])") # Default is kc705 BOARD=kc705 +# Default carrier board is nist_qc1 +CARRIER_BOARD=nist_qc1 while getopts "bBrht:d:f:" opt do @@ -53,17 +55,29 @@ do exit 1 fi ;; + c) + if [ "$OPTARG" == "nist_qc1" ] + then + CARRIER_BOARD=nist_qc1 + elif [ "$OPTARG" == "nist_qc2" ] + then + CARRIER_BOARD=nist_qc2 + else + echo "KC705 carrier board is either nist_qc1 or nist_qc2" + exit 1 + fi *) echo "ARTIQ flashing tool" echo "" echo "To flash everything, do not use any of the -b|-B|-r option." echo "" - echo "usage: $0 [-b] [-B] [-r] [-h] [-t kc705|pipistrello] [-d path]" + echo "usage: $0 [-b] [-B] [-r] [-h] [-c nist_qc1|nist_qc2] [-t kc705|pipistrello] [-d path] [-f path]" echo "-b Flash bitstream" echo "-B Flash BIOS" echo "-r Flash ARTIQ runtime" echo "-h Show this help message" echo "-t Target (kc705, pipistrello, default is: kc705)" + echo "-c Carrier board (nist_qc1, nist_qc2, default is: nist_qc1)" echo "-f Flash storage image generated with artiq_mkfs" echo "-d Directory containing the binaries to be flashed" exit 1 @@ -103,11 +117,12 @@ fi if [ "$BOARD" == "kc705" ] then UDEV_RULES=99-kc705.rules - BITSTREAM=artiq_kc705-nist_qc1-kc705.bit + BITSTREAM=artiq_kc705-${CARRIER_BOARD}-kc705.bit CABLE=jtaghs1_fast PROXY=bscan_spi_kc705.bit BIOS_ADDR=0xaf0000 RUNTIME_ADDR=0xb00000 + RUNTIME_FILENAME=runtime_${CARRIER_BOARD}.fbi FS_ADDR=0xb40000 if [ -z "$BIN_PREFIX" ]; then BIN_PREFIX=$ARTIQ_PREFIX/binaries/kc705; fi search_for_proxy $PROXY @@ -119,6 +134,7 @@ then PROXY=bscan_spi_lx45_csg324.bit BIOS_ADDR=0x170000 RUNTIME_ADDR=0x180000 + RUNTIME_FILENAME=runtime.fbi FS_ADDR=0x1c0000 if [ -z "$BIN_PREFIX" ]; then BIN_PREFIX=$ARTIQ_PREFIX/binaries/pipistrello; fi search_for_proxy $PROXY diff --git a/conda/artiq/build.sh b/conda/artiq/build.sh index b818cdf70..0f30a2453 100755 --- a/conda/artiq/build.sh +++ b/conda/artiq/build.sh @@ -16,15 +16,15 @@ BIN_PREFIX=$ARTIQ_PREFIX/binaries/ mkdir -p $ARTIQ_PREFIX/misc mkdir -p $BIN_PREFIX/kc705 $BIN_PREFIX/pipistrello -# build for KC705 +# build for KC705 NIST_QC1 cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_kc705 build-headers build-bios; cd - make -C soc/runtime clean runtime.fbi cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_kc705 $MISOC_EXTRA_VIVADO_CMDLINE build-bitstream; cd - -# install KC705 binaries +# install KC705 NIST_QC1 binaries -cp soc/runtime/runtime.fbi $BIN_PREFIX/kc705/ +cp soc/runtime/runtime.fbi $BIN_PREFIX/kc705/runtime_nist_qc1.fbi cp 
$SRC_DIR/misoc/software/bios/bios.bin $BIN_PREFIX/kc705/ cp $SRC_DIR/misoc/build/artiq_kc705-nist_qc1-kc705.bit $BIN_PREFIX/kc705/ wget http://sionneau.net/artiq/binaries/kc705/flash_proxy/bscan_spi_kc705.bit @@ -44,6 +44,17 @@ cp $SRC_DIR/misoc/build/artiq_pipistrello-nist_qc1-pipistrello.bit $BIN_PREFIX/p wget http://www.phys.ethz.ch/~robertjo/bscan_spi_lx45_csg324.bit mv bscan_spi_lx45_csg324.bit $BIN_PREFIX/pipistrello/ +# build for KC705 NIST_QC2 + +cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_kc705 -s NIST_QC2 build-headers; cd - +make -C soc/runtime clean runtime.fbi +cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_kc705 -s NIST_QC2 $MISOC_EXTRA_VIVADO_CMDLINE build-bitstream; cd - + +# install KC705 NIST_QC2 binaries + +cp soc/runtime/runtime.fbi $BIN_PREFIX/kc705/runtime_nist_qc2.fbi +cp $SRC_DIR/misoc/build/artiq_kc705-nist_qc2-kc705.bit $BIN_PREFIX/kc705/ + cp artiq/frontend/artiq_flash.sh $PREFIX/bin # misc From 646721d3e9380017d7fb9a17e1fd838e2f32cbe3 Mon Sep 17 00:00:00 2001 From: Yann Sionneau Date: Thu, 20 Aug 2015 18:28:00 +0200 Subject: [PATCH 122/191] artiq_flash: add missing semicolons --- artiq/frontend/artiq_flash.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/artiq/frontend/artiq_flash.sh b/artiq/frontend/artiq_flash.sh index cbe4cca46..e6302059f 100755 --- a/artiq/frontend/artiq_flash.sh +++ b/artiq/frontend/artiq_flash.sh @@ -66,6 +66,7 @@ do echo "KC705 carrier board is either nist_qc1 or nist_qc2" exit 1 fi + ;; *) echo "ARTIQ flashing tool" echo "" From a8575fe2ac0c0e4d61863312d1371ac02181bf32 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Fri, 21 Aug 2015 13:32:26 +0800 Subject: [PATCH 123/191] typo --- artiq/language/environment.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/artiq/language/environment.py b/artiq/language/environment.py index 534197800..c8c3acaf9 100644 --- a/artiq/language/environment.py +++ b/artiq/language/environment.py @@ -73,7 +73,7 @@ class NumberValue(_SimpleArgProcessor): :param unit: A string representing the unit of the value, for user interface (UI) purposes. - :param step: The step with with the value should be modified by up/down + :param step: The step with which the value should be modified by up/down buttons in a UI. :param min: The minimum value of the argument. :param max: The maximum value of the argument. From ac5dd0a7e5a1a79684cf1dcb443f8bbf81b1fb96 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Fri, 21 Aug 2015 13:32:53 +0800 Subject: [PATCH 124/191] doc: scans --- artiq/language/scan.py | 38 ++++++++++++++++++++++++++ doc/manual/core_language_reference.rst | 6 ++++ 2 files changed, 44 insertions(+) diff --git a/artiq/language/scan.py b/artiq/language/scan.py index 90867737d..a65ce566c 100644 --- a/artiq/language/scan.py +++ b/artiq/language/scan.py @@ -1,3 +1,23 @@ +""" +Implementation and management of scan objects. + +A scan object (e.g. :class:`artiq.language.scan.LinearScan`) represents a +one-dimensional sweep of a numerical range. Multi-dimensional scans are +constructed by combining several scan objects. + +Iterate on a scan object to scan it, e.g. :: + + for variable in self.scan: + do_something(variable) + +Iterating multiple times on the same scan object is possible, with the scan +restarting at the minimum value each time. Iterating concurrently on the +same scan object (e.g. via nested loops) is also supported, and the +iterators are independent from each other. + +Scan objects are supported both on the host and the core device. 
+""" + from random import Random, shuffle import inspect @@ -9,6 +29,7 @@ __all__ = ["NoScan", "LinearScan", "RandomScan", "ExplicitScan", "Scannable"] class NoScan: + """A scan object that yields a single value.""" def __init__(self, value): self.value = value @@ -25,6 +46,8 @@ class NoScan: class LinearScan: + """A scan object that yields a fixed number of increasing evenly + spaced values in a range.""" def __init__(self, min, max, npoints): self.min = min self.max = max @@ -47,6 +70,8 @@ class LinearScan: class RandomScan: + """A scan object that yields a fixed number of randomly ordered evenly + spaced values in a range.""" def __init__(self, min, max, npoints, seed=0): self.sequence = list(LinearScan(min, max, npoints)) shuffle(self.sequence, Random(seed).random) @@ -61,6 +86,7 @@ class RandomScan: class ExplicitScan: + """A scan object that yields values from a explicitly defined sequence.""" def __init__(self, sequence): self.sequence = sequence @@ -81,6 +107,18 @@ _ty_to_scan = { class Scannable: + """An argument (as defined in :class:`artiq.language.environment`) that + takes a scan object. + + :param global_min: The minimum value taken by the scanned variable, common + to all scan modes. The user interface takes this value to set the + range of its input widgets. + :param global_max: Same as global_min, but for the maximum value. + :param global_step: The step with which the value should be modified by + up/down buttons in a user interface. + :param unit: A string representing the unit of the scanned variable, for user + interface purposes. + """ def __init__(self, global_min=None, global_max=None, global_step=None, unit="", default=NoDefault): self.global_min = global_min diff --git a/doc/manual/core_language_reference.rst b/doc/manual/core_language_reference.rst index f4efa4335..b88b95ade 100644 --- a/doc/manual/core_language_reference.rst +++ b/doc/manual/core_language_reference.rst @@ -15,6 +15,12 @@ The most commonly used features from those modules can be imported with ``from a .. automodule:: artiq.language.environment :members: +:mod:`artiq.language.scan` module +---------------------------------------- + +.. automodule:: artiq.language.scan + :members: + :mod:`artiq.language.units` module ---------------------------------- From d4f42e33e6ec64cccdf2893ff0d4f088c9d22d6c Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Fri, 21 Aug 2015 13:33:23 +0800 Subject: [PATCH 125/191] doc: clarify hw support --- doc/manual/core_device.rst | 3 ++- doc/manual/getting_started.rst | 6 ++---- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/doc/manual/core_device.rst b/doc/manual/core_device.rst index 6b0120ec8..c99ff7862 100644 --- a/doc/manual/core_device.rst +++ b/doc/manual/core_device.rst @@ -19,6 +19,7 @@ The flash storage area is one sector (typically 64 kB) large and is organized as This flash storage space can be accessed by using ``artiq_coretool`` (see: :ref:`core-device-access-tool`). +.. _board-ports: FPGA board ports **************** @@ -49,7 +50,7 @@ With the QC1 hardware, the TTL lines are mapped as follows: Pipistrello ----------- -The low-cost Pipistrello FPGA board can be used as a lower-cost but slower alternative. +The low-cost Pipistrello FPGA board can be used as a lower-cost but slower alternative. The current USB over serial protocol also suffers from limitations (no monitoring/injection, no idle experiment, no kernel interruptions, lack of robustness). When plugged to an adapter, the NIST QC1 hardware can be used. 
The TTL lines are mapped to RTIO channels as follows: diff --git a/doc/manual/getting_started.rst b/doc/manual/getting_started.rst index fd14b9956..3e8f9f557 100644 --- a/doc/manual/getting_started.rst +++ b/doc/manual/getting_started.rst @@ -23,7 +23,7 @@ As a very first step, we will turn on a LED on the core device. Create a file `` The central part of our code is our ``LED`` class, that derives from :class:`artiq.language.environment.EnvExperiment`. Among other features, ``EnvExperiment`` calls our ``build`` method and provides the ``attr_device`` method that interfaces to the device database to create the appropriate device drivers and make those drivers accessible as ``self.core`` and ``self.led``. The ``@kernel`` decorator tells the system that the ``run`` method must be executed on the core device (instead of the host). The decorator uses ``self.core`` internally, which is why we request the core device using ``attr_device`` like any other. -Copy the files ``ddb.pyon`` and ``pdb.pyon`` (containing the device and parameter databases) from the ``examples`` folder of ARTIQ into the same directory as ``led.py`` (alternatively, you can use the ``-d`` and ``-p`` options of ``artiq_run.py``). You can open the database files using a text editor - their contents are in a human-readable format. You will probably want to set the IP address of the core device in ``ddb.pyon`` so that the computer can connect to it (it is the ``host`` parameter of the ``comm`` entry). See :ref:`ddb` for more information. +Copy the files ``ddb.pyon`` and ``pdb.pyon`` (containing the device and parameter databases) from the ``examples`` folder of ARTIQ into the same directory as ``led.py`` (alternatively, you can use the ``-d`` and ``-p`` options of ``artiq_run.py``). You can open the database files using a text editor - their contents are in a human-readable format. You will probably want to set the IP address of the core device in ``ddb.pyon`` so that the computer can connect to it (it is the ``host`` parameter of the ``comm`` entry). See :ref:`ddb` for more information. The example device database is designed for the NIST QC1 hardware on the KC705; see :ref:`board-ports` for RTIO channel assignments if you need to adapt the device database to a different hardware platform. Run your code using ``artiq_run``, which is part of the ARTIQ front-end tools: :: @@ -102,7 +102,7 @@ Create a new file ``rtio.py`` containing the following: :: delay(2*us) -Connect an oscilloscope or logic analyzer to TTL0 (pin C11 on the Pipistrello) and run ``artiq_run.py led.py``. Notice that the generated signal's period is precisely 4 microseconds, and that it has a duty cycle of precisely 50%. This is not what you would expect if the delay and the pulse were implemented with CPU-controlled GPIO: overhead from the loop management, function calls, etc. would increase the signal's period, and asymmetry in the overhead would cause duty cycle distortion. +Connect an oscilloscope or logic analyzer to TTL0 and run ``artiq_run.py led.py``. Notice that the generated signal's period is precisely 4 microseconds, and that it has a duty cycle of precisely 50%. This is not what you would expect if the delay and the pulse were implemented with CPU-controlled GPIO: overhead from the loop management, function calls, etc. would increase the signal's period, and asymmetry in the overhead would cause duty cycle distortion. 
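For readers reconstructing the ``rtio.py`` file referred to above, here is a minimal sketch consistent with the described behaviour: a 2 µs pulse followed by a 2 µs delay gives the 4 µs period and 50% duty cycle, and the ``ttl0`` device name comes from the example device database::

    from artiq import *


    class Tutorial(EnvExperiment):
        def build(self):
            self.attr_device("core")
            self.attr_device("ttl0")

        @kernel
        def run(self):
            for i in range(1000000):
                self.ttl0.pulse(2*us)   # drive TTL0 high for 2 microseconds
                delay(2*us)             # keep it low for another 2 microseconds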
Instead, inside the core device, output timing is generated by the gateware and the CPU only programs switching commands with certain timestamps that the CPU computes. This guarantees precise timing as long as the CPU can keep generating timestamps that are increasing fast enough. In case it fails to do that (and attempts to program an event with a timestamp in the past), the :class:`artiq.coredevice.runtime_exceptions.RTIOUnderflow` exception is raised. The kernel causing it may catch it (using a regular ``try... except...`` construct), or it will be propagated to the host. @@ -140,8 +140,6 @@ Try the following code and observe the generated pulses on a 2-channel oscillosc self.ttl1.pulse(4*us) delay(4*us) -TTL1 is assigned to the pin C10 of the Pipistrello. The name of the attributes (``ttl0`` and ``ttl1``) is used to look up hardware in the device database. - Within a parallel block, some statements can be made sequential again using a ``with sequential`` construct. Observe the pulses generated by this code: :: for i in range(1000000): From 5fc5d0b672133b516e2cdeb25728475e43ad6ee4 Mon Sep 17 00:00:00 2001 From: Yann Sionneau Date: Fri, 21 Aug 2015 09:36:59 +0200 Subject: [PATCH 126/191] artiq_flash: replace wrong wording 'carrier' with 'mezzanine' --- artiq/frontend/artiq_flash.sh | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/artiq/frontend/artiq_flash.sh b/artiq/frontend/artiq_flash.sh index e6302059f..a008b2ebd 100755 --- a/artiq/frontend/artiq_flash.sh +++ b/artiq/frontend/artiq_flash.sh @@ -9,10 +9,10 @@ ARTIQ_PREFIX=$(python3 -c "import artiq; print(artiq.__path__[0])") # Default is kc705 BOARD=kc705 -# Default carrier board is nist_qc1 -CARRIER_BOARD=nist_qc1 +# Default mezzanine board is nist_qc1 +MEZZANINE_BOARD=nist_qc1 -while getopts "bBrht:d:f:" opt +while getopts "bBrht:d:f:m:" opt do case $opt in b) @@ -55,15 +55,15 @@ do exit 1 fi ;; - c) + m) if [ "$OPTARG" == "nist_qc1" ] then - CARRIER_BOARD=nist_qc1 + MEZZANINE_BOARD=nist_qc1 elif [ "$OPTARG" == "nist_qc2" ] then - CARRIER_BOARD=nist_qc2 + MEZZANINE_BOARD=nist_qc2 else - echo "KC705 carrier board is either nist_qc1 or nist_qc2" + echo "KC705 mezzanine board is either nist_qc1 or nist_qc2" exit 1 fi ;; @@ -72,13 +72,13 @@ do echo "" echo "To flash everything, do not use any of the -b|-B|-r option." 
echo "" - echo "usage: $0 [-b] [-B] [-r] [-h] [-c nist_qc1|nist_qc2] [-t kc705|pipistrello] [-d path] [-f path]" + echo "usage: $0 [-b] [-B] [-r] [-h] [-m nist_qc1|nist_qc2] [-t kc705|pipistrello] [-d path] [-f path]" echo "-b Flash bitstream" echo "-B Flash BIOS" echo "-r Flash ARTIQ runtime" echo "-h Show this help message" echo "-t Target (kc705, pipistrello, default is: kc705)" - echo "-c Carrier board (nist_qc1, nist_qc2, default is: nist_qc1)" + echo "-m Mezzanine board (nist_qc1, nist_qc2, default is: nist_qc1)" echo "-f Flash storage image generated with artiq_mkfs" echo "-d Directory containing the binaries to be flashed" exit 1 @@ -118,12 +118,12 @@ fi if [ "$BOARD" == "kc705" ] then UDEV_RULES=99-kc705.rules - BITSTREAM=artiq_kc705-${CARRIER_BOARD}-kc705.bit + BITSTREAM=artiq_kc705-${MEZZANINE_BOARD}-kc705.bit CABLE=jtaghs1_fast PROXY=bscan_spi_kc705.bit BIOS_ADDR=0xaf0000 RUNTIME_ADDR=0xb00000 - RUNTIME_FILENAME=runtime_${CARRIER_BOARD}.fbi + RUNTIME_FILENAME=runtime_${MEZZANINE_BOARD}.fbi FS_ADDR=0xb40000 if [ -z "$BIN_PREFIX" ]; then BIN_PREFIX=$ARTIQ_PREFIX/binaries/kc705; fi search_for_proxy $PROXY From 8d8aa32aedf778c2aa580a8778ef07139d07ecd1 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Fri, 21 Aug 2015 17:51:01 +0800 Subject: [PATCH 127/191] runtime: fix AD9914 register read in bridge --- soc/runtime/bridge.c | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/soc/runtime/bridge.c b/soc/runtime/bridge.c index 78083a16f..3c4097c39 100644 --- a/soc/runtime/bridge.c +++ b/soc/runtime/bridge.c @@ -20,7 +20,13 @@ static int dds_read(int addr) { int r; - dds_write(addr | 128, 0); +#ifdef DDS_AD9858 +#define DDS_READ_FLAG 128 +#endif +#ifdef DDS_AD9914 +#define DDS_READ_FLAG 256 +#endif + dds_write(addr | DDS_READ_FLAG, 0); while(rtio_i_status_read() & RTIO_I_STATUS_EMPTY); r = rtio_i_data_read(); rtio_i_re_write(1); From ac7e080c48dc6124bce953d3874c027bb646924f Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Fri, 21 Aug 2015 17:51:11 +0800 Subject: [PATCH 128/191] typo --- artiq/language/scan.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/artiq/language/scan.py b/artiq/language/scan.py index a65ce566c..33e98eb4e 100644 --- a/artiq/language/scan.py +++ b/artiq/language/scan.py @@ -86,7 +86,7 @@ class RandomScan: class ExplicitScan: - """A scan object that yields values from a explicitly defined sequence.""" + """A scan object that yields values from an explicitly defined sequence.""" def __init__(self, sequence): self.sequence = sequence From c9236870bd948d5c1da359ad743d5721d8afbb7d Mon Sep 17 00:00:00 2001 From: Yann Sionneau Date: Fri, 21 Aug 2015 18:47:00 +0200 Subject: [PATCH 129/191] travis: do not upload conda package if branch is not master. 
fixes #55 --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 7f3498cb7..216fe090a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -23,7 +23,7 @@ script: - make -C doc/manual html after_success: - anaconda -q login --hostname $(hostname) --username $binstar_login --password $binstar_password - - anaconda -q upload --user $binstar_login --channel dev --force $HOME/miniconda/conda-bld/linux-64/artiq-*.tar.bz2 + - if [ "$TRAVIS_BRANCH" == "master" ]; then anaconda -q upload --user $binstar_login --channel dev --force $HOME/miniconda/conda-bld/linux-64/artiq-*.tar.bz2; fi - anaconda -q logout - coveralls notifications: From e0c35cabab9b5d99f7b48592139f078cd8180aba Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sat, 22 Aug 2015 09:56:40 +0800 Subject: [PATCH 130/191] runtime: fix onehot selection in ddstest_one --- soc/runtime/test_mode.c | 3 +++ 1 file changed, 3 insertions(+) diff --git a/soc/runtime/test_mode.c b/soc/runtime/test_mode.c index 3c0ce02f0..5083f84d7 100644 --- a/soc/runtime/test_mode.c +++ b/soc/runtime/test_mode.c @@ -257,6 +257,9 @@ static void ddstest_one(unsigned int i) }; unsigned int f, g, j; +#ifdef DDS_ONEHOT_SEL + i = 1 << i; +#endif brg_ddssel(i); ddsinit(); From 8d4ef479e5145ce11f8a0908d5686bb4293a7805 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sat, 22 Aug 2015 10:03:35 +0800 Subject: [PATCH 131/191] runtime: test all DDS channels --- soc/runtime/test_mode.c | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/soc/runtime/test_mode.c b/soc/runtime/test_mode.c index 5083f84d7..3fda83ae8 100644 --- a/soc/runtime/test_mode.c +++ b/soc/runtime/test_mode.c @@ -303,8 +303,8 @@ static void ddstest(char *n) } n2 = strtoul(n, &c, 0); - for(i=0; i Date: Sat, 22 Aug 2015 10:43:46 +0800 Subject: [PATCH 132/191] runtime: support single channel test sequence --- soc/runtime/test_mode.c | 42 +++++++++++++++++++++++++++++------------ 1 file changed, 30 insertions(+), 12 deletions(-) diff --git a/soc/runtime/test_mode.c b/soc/runtime/test_mode.c index 3fda83ae8..40384ee0a 100644 --- a/soc/runtime/test_mode.c +++ b/soc/runtime/test_mode.c @@ -248,7 +248,7 @@ static void ddsinit(void) } #endif -static void ddstest_one(unsigned int i) +static void do_ddstest_one(unsigned int i) { unsigned int v[12] = { 0xaaaaaaaa, 0x55555555, 0xa5a5a5a5, 0x5a5a5a5a, @@ -258,9 +258,10 @@ static void ddstest_one(unsigned int i) unsigned int f, g, j; #ifdef DDS_ONEHOT_SEL - i = 1 << i; -#endif + brg_ddssel(1 << i); +#else brg_ddssel(i); +#endif ddsinit(); for(j=0; j<12; j++) { @@ -291,23 +292,40 @@ static void ddstest_one(unsigned int i) } } -static void ddstest(char *n) +static void ddstest(char *n, char *channel) { int i, j; char *c; unsigned int n2; + int channel2; - if (*n == 0) { - printf("ddstest \n"); + if((*n == 0) || (*channel == 0)) { + printf("ddstest \n"); return; } n2 = strtoul(n, &c, 0); - - for(i=0;i= 0) { + for(i=0;i - read DDS register"); puts("ddsfud - pulse FUD"); puts("ddsftw - write FTW"); - puts("ddstest - perform test sequence on DDS"); + puts("ddstest - perform test sequence on DDS"); puts("leds - set LEDs"); #if (defined CSR_SPIFLASH_BASE && defined SPIFLASH_PAGE_SIZE) puts("fserase - erase flash storage"); @@ -606,7 +624,7 @@ static void do_command(char *c) else if(strcmp(token, "ddsinit") == 0) ddsinit(); else if(strcmp(token, "ddsfud") == 0) ddsfud(); else if(strcmp(token, "ddsftw") == 0) ddsftw(get_token(&c), get_token(&c)); - else if(strcmp(token, "ddstest") == 0) 
ddstest(get_token(&c)); + else if(strcmp(token, "ddstest") == 0) ddstest(get_token(&c), get_token(&c)); #if (defined CSR_SPIFLASH_BASE && defined SPIFLASH_PAGE_SIZE) else if(strcmp(token, "fserase") == 0) fs_erase(); From 094fc1cfd1aebd3556747e9aca0ea03fc624c441 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sat, 22 Aug 2015 11:49:38 +0800 Subject: [PATCH 133/191] qc2: DDS selection is active low --- artiq/gateware/ad9xxx.py | 5 ++++- artiq/gateware/nist_qc2.py | 6 +++--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/artiq/gateware/ad9xxx.py b/artiq/gateware/ad9xxx.py index aa087053f..439ece554 100644 --- a/artiq/gateware/ad9xxx.py +++ b/artiq/gateware/ad9xxx.py @@ -62,7 +62,10 @@ class AD9xxx(Module): self.comb += pads.rst.eq(gpio[0]) else: self.comb += pads.rst_n.eq(~gpio[0]) - self.comb += pads.sel.eq(gpio[1:]) + if hasattr(pads, "sel"): + self.comb += pads.sel.eq(gpio[1:]) + else: + self.comb += pads.sel_n.eq(~gpio[1:]) bus_r_gpio = Signal() self.comb += If(bus_r_gpio, diff --git a/artiq/gateware/nist_qc2.py b/artiq/gateware/nist_qc2.py index 9d6896781..da3997f91 100644 --- a/artiq/gateware/nist_qc2.py +++ b/artiq/gateware/nist_qc2.py @@ -26,9 +26,9 @@ fmc_adapter_io = [ "LPC:LA11_N LPC:LA12_N LPC:LA11_P LPC:LA12_P " "LPC:LA07_N LPC:LA08_N LPC:LA07_P LPC:LA08_P " "LPC:LA04_N LPC:LA03_N LPC:LA04_P LPC:LA03_P")), - Subsignal("sel", Pins("LPC:LA24_N LPC:LA29_P LPC:LA28_P LPC:LA29_N " - "LPC:LA28_N LPC:LA31_P LPC:LA30_P LPC:LA31_N " - "LPC:LA30_N LPC:LA33_P LPC:LA33_N")), + Subsignal("sel_n", Pins("LPC:LA24_N LPC:LA29_P LPC:LA28_P LPC:LA29_N " + "LPC:LA28_N LPC:LA31_P LPC:LA30_P LPC:LA31_N " + "LPC:LA30_N LPC:LA33_P LPC:LA33_N")), Subsignal("fud", Pins("LPC:LA21_N")), Subsignal("wr_n", Pins("LPC:LA24_P")), Subsignal("rd_n", Pins("LPC:LA25_N")), From fba05531f456ba4d0f2b47d241bb039ab66ba930 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sat, 22 Aug 2015 12:25:44 +0800 Subject: [PATCH 134/191] runtime: enable ad9914 matched latency --- soc/runtime/dds.c | 14 ++++---------- soc/runtime/test_mode.c | 8 ++++---- 2 files changed, 8 insertions(+), 14 deletions(-) diff --git a/soc/runtime/dds.c b/soc/runtime/dds.c index 5f9f8650e..61a22d4d5 100644 --- a/soc/runtime/dds.c +++ b/soc/runtime/dds.c @@ -77,18 +77,12 @@ void dds_init(long long int timestamp, int channel) #endif #ifdef DDS_AD9914 - /* - * Enable cosine output (to match AD9858 behavior) - * Enable DAC calibration - * Leave SYNCLK enabled and PLL/divider disabled - */ - DDS_WRITE(DDS_CFR1L, 0x0008); - DDS_WRITE(DDS_CFR1H, 0x0000); - DDS_WRITE(DDS_CFR4H, 0x0105); + DDS_WRITE(DDS_CFR1H, 0x0000); /* Enable cosine output */ + DDS_WRITE(DDS_CFR2L, 0x8900); /* Enable matched latency */ + DDS_WRITE(DDS_CFR4H, 0x0105); /* Enable DAC calibration */ DDS_WRITE(DDS_FUD, 0); - /* Disable DAC calibration */ now += DURATION_DAC_CAL; - DDS_WRITE(DDS_CFR4H, 0x0005); + DDS_WRITE(DDS_CFR4H, 0x0005); /* Disable DAC calibration */ DDS_WRITE(DDS_FUD, 0); #endif } diff --git a/soc/runtime/test_mode.c b/soc/runtime/test_mode.c index 40384ee0a..a15950bee 100644 --- a/soc/runtime/test_mode.c +++ b/soc/runtime/test_mode.c @@ -237,13 +237,13 @@ static void ddsinit(void) long long int t; brg_ddsreset(); - brg_ddswrite(DDS_CFR1L, 0x0008); - brg_ddswrite(DDS_CFR1H, 0x0000); - brg_ddswrite(DDS_CFR4H, 0x0105); + brg_ddswrite(DDS_CFR1H, 0x0000); /* Enable cosine output */ + brg_ddswrite(DDS_CFR2L, 0x8900); /* Enable matched latency */ + brg_ddswrite(DDS_CFR4H, 0x0105); /* Enable DAC calibration */ brg_ddswrite(DDS_FUD, 0); t = 
clock_get_ms(); while(clock_get_ms() < t + 2); - brg_ddswrite(DDS_CFR4H, 0x0005); + brg_ddswrite(DDS_CFR4H, 0x0005); /* Disable DAC calibration */ brg_ddsfud(); } #endif From d38f1e679644eba7661c385cdaba0d33bfc5b3be Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sat, 22 Aug 2015 13:12:30 +0800 Subject: [PATCH 135/191] ad9xxx: fix gpio signal length --- artiq/gateware/ad9xxx.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/artiq/gateware/ad9xxx.py b/artiq/gateware/ad9xxx.py index 439ece554..0bd290df7 100644 --- a/artiq/gateware/ad9xxx.py +++ b/artiq/gateware/ad9xxx.py @@ -55,7 +55,11 @@ class AD9xxx(Module): dts.oe.eq(~rx) ] - gpio = Signal(flen(pads.sel) + 1) + if hasattr(pads, "sel"): + sel_len = flen(pads.sel) + else: + sel_len = flen(pads.sel_n) + gpio = Signal(sel_len + 1) gpio_load = Signal() self.sync += If(gpio_load, gpio.eq(bus.dat_w)) if hasattr(pads, "rst"): From 72616f6f293956a8e4a4016ad0d3287ee872cdbd Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sat, 22 Aug 2015 21:04:23 +0800 Subject: [PATCH 136/191] language: document HasEnvironment.dbs --- artiq/language/environment.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/artiq/language/environment.py b/artiq/language/environment.py index c8c3acaf9..8a90f51cf 100644 --- a/artiq/language/environment.py +++ b/artiq/language/environment.py @@ -133,6 +133,15 @@ class HasEnvironment: raise NotImplementedError def dbs(self): + """Returns the device manager, the parameter database and the result + database, in this order. + + This is the same order that the constructor takes them, allowing + sub-objects to be created with this idiom to pass the environment + around: :: + + sub_object = SomeLibrary(*self.dbs()) + """ return self.__dmgr, self.__pdb, self.__rdb def get_argument(self, key, processor=None): From 42e87c060e47c3c46cacbc254cbf8910d4eb8059 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sat, 22 Aug 2015 21:04:44 +0800 Subject: [PATCH 137/191] protocols/pyon: use better object for empty builtins --- artiq/protocols/pyon.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/artiq/protocols/pyon.py b/artiq/protocols/pyon.py index 13161094e..ff793945d 100644 --- a/artiq/protocols/pyon.py +++ b/artiq/protocols/pyon.py @@ -153,7 +153,7 @@ def _npscalar(ty, data): _eval_dict = { - "__builtins__": None, + "__builtins__": {}, "null": None, "false": False, From 21a28a13c99246d7c5aa91dc86f1657d05db8422 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sat, 22 Aug 2015 21:05:05 +0800 Subject: [PATCH 138/191] gui/explorer: less verbose error dialogs --- artiq/gui/explorer.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/artiq/gui/explorer.py b/artiq/gui/explorer.py index e61a451f0..a1c16a346 100644 --- a/artiq/gui/explorer.py +++ b/artiq/gui/explorer.py @@ -1,5 +1,4 @@ import asyncio -import traceback from quamash import QtGui, QtCore from pyqtgraph import dockarea @@ -136,12 +135,12 @@ class _ArgumentSetter(LayoutWidget): for arg, entry in self._args_to_entries.items(): try: r[arg] = entry.get_argument_value() - except: + except Exception as e: if show_error_message: msgbox = QtGui.QMessageBox(self.dialog_parent) msgbox.setWindowTitle("Error") - msgbox.setText("Failed to obtain value for argument '{}'.\n{}" - .format(arg, traceback.format_exc())) + msgbox.setText("Failed to obtain value for argument '{}':\n{}" + .format(arg, str(e))) msgbox.setStandardButtons(QtGui.QMessageBox.Ok) msgbox.show() return None From 
284e3ddb2b5e19eef2cffb0b671aae0b7bb2582f Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Sat, 22 Aug 2015 21:05:24 +0800 Subject: [PATCH 139/191] gui/scan: enforce min <= max --- artiq/gui/scan.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/artiq/gui/scan.py b/artiq/gui/scan.py index ee97a1401..ea34cf8d3 100644 --- a/artiq/gui/scan.py +++ b/artiq/gui/scan.py @@ -40,9 +40,13 @@ class _Range(LayoutWidget): force_spinbox_value(self.npoints, npoints) def get_values(self): + min = self.min.value() + max = self.max.value() + if min > max: + raise ValueError("Minimum scan boundary must be less than maximum") return { - "min": self.min.value(), - "max": self.max.value(), + "min": min, + "max": max, "npoints": self.npoints.value() } From 34771ade67c085afc50e101c5b3cb5bee3fca0dd Mon Sep 17 00:00:00 2001 From: Yann Sionneau Date: Sat, 22 Aug 2015 15:28:32 +0200 Subject: [PATCH 140/191] artiq_flash.sh: fix runtime programming and allow usage of -d . -r --- artiq/frontend/artiq_flash.sh | 6 +++--- conda/artiq/build.sh | 6 ++++-- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/artiq/frontend/artiq_flash.sh b/artiq/frontend/artiq_flash.sh index a008b2ebd..4d155e4b9 100755 --- a/artiq/frontend/artiq_flash.sh +++ b/artiq/frontend/artiq_flash.sh @@ -123,7 +123,7 @@ then PROXY=bscan_spi_kc705.bit BIOS_ADDR=0xaf0000 RUNTIME_ADDR=0xb00000 - RUNTIME_FILENAME=runtime_${MEZZANINE_BOARD}.fbi + RUNTIME_FILE=${MEZZANINE_BOARD}/runtime.fbi FS_ADDR=0xb40000 if [ -z "$BIN_PREFIX" ]; then BIN_PREFIX=$ARTIQ_PREFIX/binaries/kc705; fi search_for_proxy $PROXY @@ -135,7 +135,7 @@ then PROXY=bscan_spi_lx45_csg324.bit BIOS_ADDR=0x170000 RUNTIME_ADDR=0x180000 - RUNTIME_FILENAME=runtime.fbi + RUNTIME_FILE=runtime.fbi FS_ADDR=0x1c0000 if [ -z "$BIN_PREFIX" ]; then BIN_PREFIX=$ARTIQ_PREFIX/binaries/pipistrello; fi search_for_proxy $PROXY @@ -185,7 +185,7 @@ fi if [ "${FLASH_RUNTIME}" == "1" ] then echo "Flashing ARTIQ runtime..." - xc3sprog -v -c $CABLE -I$PROXY_PATH/$PROXY $BIN_PREFIX/runtime.fbi:w:$RUNTIME_ADDR:BIN + xc3sprog -v -c $CABLE -I$PROXY_PATH/$PROXY $BIN_PREFIX/${RUNTIME_FILE}:w:$RUNTIME_ADDR:BIN fi echo "Done." 
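To summarize the per-target selection performed by the flashing script above, a hypothetical Python rendition is given below; the addresses and file names are taken from the script itself, and this sketch is for reference only, not part of the tool::

    # kc705 substitutes the mezzanine board name into its bitstream and runtime
    # paths; pipistrello has a single fixed set of files.
    FLASH_TARGETS = {
        "kc705": {
            "bitstream":    "artiq_kc705-{mezzanine}-kc705.bit",
            "runtime_file": "{mezzanine}/runtime.fbi",
            "bios_addr":    0xaf0000,
            "runtime_addr": 0xb00000,
            "fs_addr":      0xb40000,
        },
        "pipistrello": {
            "bitstream":    "artiq_pipistrello-nist_qc1-pipistrello.bit",
            "runtime_file": "runtime.fbi",
            "bios_addr":    0x170000,
            "runtime_addr": 0x180000,
            "fs_addr":      0x1c0000,
        },
    }

    def flash_files(board="kc705", mezzanine="nist_qc1"):
        target = FLASH_TARGETS[board]
        return {key: (value.format(mezzanine=mezzanine)
                      if isinstance(value, str) else value)
                for key, value in target.items()}

    print(flash_files("kc705", "nist_qc2"))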
xc3sprog -v -c $CABLE -R > /dev/null 2>&1 diff --git a/conda/artiq/build.sh b/conda/artiq/build.sh index 0f30a2453..0a6dd1225 100755 --- a/conda/artiq/build.sh +++ b/conda/artiq/build.sh @@ -24,7 +24,8 @@ cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_kc705 $MISOC_EXTRA_VIVADO_ # install KC705 NIST_QC1 binaries -cp soc/runtime/runtime.fbi $BIN_PREFIX/kc705/runtime_nist_qc1.fbi +mkdir -p $BIN_PREFIX/kc705/nist_qc1 +cp soc/runtime/runtime.fbi $BIN_PREFIX/kc705/nist_qc1/ cp $SRC_DIR/misoc/software/bios/bios.bin $BIN_PREFIX/kc705/ cp $SRC_DIR/misoc/build/artiq_kc705-nist_qc1-kc705.bit $BIN_PREFIX/kc705/ wget http://sionneau.net/artiq/binaries/kc705/flash_proxy/bscan_spi_kc705.bit @@ -52,7 +53,8 @@ cd $SRC_DIR/misoc; $PYTHON make.py -X ../soc -t artiq_kc705 -s NIST_QC2 $MISOC_E # install KC705 NIST_QC2 binaries -cp soc/runtime/runtime.fbi $BIN_PREFIX/kc705/runtime_nist_qc2.fbi +mkdir -p $BIN_PREFIX/kc705/nist_qc2 +cp soc/runtime/runtime.fbi $BIN_PREFIX/kc705/nist_qc2/ cp $SRC_DIR/misoc/build/artiq_kc705-nist_qc2-kc705.bit $BIN_PREFIX/kc705/ cp artiq/frontend/artiq_flash.sh $PREFIX/bin From 84b07607adfd42365137b238ccdabe5557c0aef1 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Mon, 24 Aug 2015 20:20:33 +0800 Subject: [PATCH 141/191] gui: do not attempt moninj on windows --- artiq/frontend/artiq_gui.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/artiq/frontend/artiq_gui.py b/artiq/frontend/artiq_gui.py index b33e19c43..3811ce6b2 100755 --- a/artiq/frontend/artiq_gui.py +++ b/artiq/frontend/artiq_gui.py @@ -98,18 +98,22 @@ def main(): args.server, args.port_notify)) atexit.register(lambda: loop.run_until_complete(d_results.sub_close())) - d_ttl_dds = MonInj() - loop.run_until_complete(d_ttl_dds.start(args.server, args.port_notify)) - atexit.register(lambda: loop.run_until_complete(d_ttl_dds.stop())) + if os.name != "nt": + d_ttl_dds = MonInj() + loop.run_until_complete(d_ttl_dds.start(args.server, args.port_notify)) + atexit.register(lambda: loop.run_until_complete(d_ttl_dds.stop())) d_params = ParametersDock() loop.run_until_complete(d_params.sub_connect( args.server, args.port_notify)) atexit.register(lambda: loop.run_until_complete(d_params.sub_close())) - area.addDock(d_ttl_dds.dds_dock, "top") - area.addDock(d_ttl_dds.ttl_dock, "above", d_ttl_dds.dds_dock) - area.addDock(d_results, "above", d_ttl_dds.ttl_dock) + if os.name != "nt": + area.addDock(d_ttl_dds.dds_dock, "top") + area.addDock(d_ttl_dds.ttl_dock, "above", d_ttl_dds.dds_dock) + area.addDock(d_results, "above", d_ttl_dds.ttl_dock) + else: + area.addDock(d_results, "top") area.addDock(d_params, "above", d_results) area.addDock(d_explorer, "above", d_params) From 45a90bf272853abb63e8890de6c6aa52fa7511e9 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Mon, 24 Aug 2015 20:28:39 +0800 Subject: [PATCH 142/191] conda: remove stale ARTIQ_GUI env variable --- conda/artiq/bld.bat | 1 - conda/artiq/build.sh | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/conda/artiq/bld.bat b/conda/artiq/bld.bat index d1604ee7b..e104111df 100644 --- a/conda/artiq/bld.bat +++ b/conda/artiq/bld.bat @@ -1,2 +1 @@ -set ARTIQ_GUI=0 "%PYTHON%" setup.py install --single-version-externally-managed --record=record.txt diff --git a/conda/artiq/build.sh b/conda/artiq/build.sh index 0a6dd1225..f74722cee 100755 --- a/conda/artiq/build.sh +++ b/conda/artiq/build.sh @@ -7,7 +7,7 @@ then source $BUILD_SETTINGS_FILE fi -ARTIQ_GUI=1 $PYTHON setup.py install 
--single-version-externally-managed --record=record.txt +$PYTHON setup.py install --single-version-externally-managed --record=record.txt git clone --recursive https://github.com/m-labs/misoc export MSCDIR=$SRC_DIR/misoc From 7d4d92ec0607521f7531729456764defc7596260 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Mon, 24 Aug 2015 22:59:56 +0800 Subject: [PATCH 143/191] gui/explorer: use QTreeWidget for argument editor --- artiq/gui/explorer.py | 44 +++++++++++++++++++++++++------------------ 1 file changed, 26 insertions(+), 18 deletions(-) diff --git a/artiq/gui/explorer.py b/artiq/gui/explorer.py index a1c16a346..66dc9ce36 100644 --- a/artiq/gui/explorer.py +++ b/artiq/gui/explorer.py @@ -115,21 +115,33 @@ _procty_to_entry = { } -class _ArgumentSetter(LayoutWidget): - def __init__(self, dialog_parent, arguments): - LayoutWidget.__init__(self) +class _ArgumentEditor(QtGui.QTreeWidget): + def __init__(self, dialog_parent): + QtGui.QTreeWidget.__init__(self) + self.setColumnCount(2) + self.header().setResizeMode( + QtGui.QHeaderView.ResizeToContents) + self.header().setVisible(False) + self.setSelectionMode(QtGui.QAbstractItemView.NoSelection) + self.dialog_parent = dialog_parent + self.set_arguments([]) + + def set_arguments(self, arguments): + self.clear() if not arguments: - self.addWidget(QtGui.QLabel("No arguments"), 0, 0) + self.addTopLevelItem(QtGui.QTreeWidgetItem(["No arguments", ""])) self._args_to_entries = dict() for n, (name, procdesc) in enumerate(arguments): - self.addWidget(QtGui.QLabel(name), n, 0) entry = _procty_to_entry[procdesc["ty"]](procdesc) - self.addWidget(entry, n, 1) self._args_to_entries[name] = entry + widget_item = QtGui.QTreeWidgetItem([name, ""]) + self.addTopLevelItem(widget_item) + self.setItemWidget(widget_item, 1, entry) + def get_argument_values(self, show_error_message): r = dict() for arg, entry in self._args_to_entries.items(): @@ -201,27 +213,23 @@ class ExplorerDock(dockarea.Dock): grid.addWidget(submit, 3, 0, colspan=4) submit.clicked.connect(self.submit_clicked) - self.argsetter = _ArgumentSetter(self.dialog_parent, []) - self.splitter.addWidget(self.argsetter) + self.argeditor = _ArgumentEditor(self.dialog_parent) + self.splitter.addWidget(self.argeditor) self.splitter.setSizes([grid.minimumSizeHint().width(), 1000]) self.state = dict() def update_selection(self, selected, deselected): if deselected: - self.state[deselected] = self.argsetter.get_argument_values(False) + self.state[deselected] = self.argeditor.get_argument_values(False) if selected: expinfo = self.explist_model.backing_store[selected] - arguments = expinfo["arguments"] - sizes = self.splitter.sizes() - self.argsetter.deleteLater() - self.argsetter = _ArgumentSetter(self.dialog_parent, arguments) + self.argeditor.set_arguments(expinfo["arguments"]) if selected in self.state: arguments = self.state[selected] if arguments is not None: - self.argsetter.set_argument_values(arguments, True) - self.splitter.insertWidget(1, self.argsetter) - self.splitter.setSizes(sizes) + self.argeditor.set_argument_values(arguments, True) + self.splitter.insertWidget(1, self.argeditor) self.selected_key = selected def _sel_to_key(self, selection): @@ -241,7 +249,7 @@ class ExplorerDock(dockarea.Dock): if idx: row = idx[0].row() key = self.explist_model.row_to_key[row] - self.state[key] = self.argsetter.get_argument_values(False) + self.state[key] = self.argeditor.get_argument_values(False) return self.state def restore_state(self, state): @@ -285,7 +293,7 @@ class 
ExplorerDock(dockarea.Dock): due_date = self.datetime.dateTime().toMSecsSinceEpoch()/1000 else: due_date = None - arguments = self.argsetter.get_argument_values(True) + arguments = self.argeditor.get_argument_values(True) if arguments is None: return asyncio.async(self.submit(self.pipeline.text(), From bb228b00ef52fcfc7bf215487b7b641c525b5de0 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Mon, 24 Aug 2015 23:46:54 +0800 Subject: [PATCH 144/191] gui,language,master: argument groups --- artiq/gui/explorer.py | 56 +++++++++++++++++++++++++++++------ artiq/language/environment.py | 10 ++++--- artiq/master/worker_impl.py | 4 +-- 3 files changed, 55 insertions(+), 15 deletions(-) diff --git a/artiq/gui/explorer.py b/artiq/gui/explorer.py index 66dc9ce36..81b491d52 100644 --- a/artiq/gui/explorer.py +++ b/artiq/gui/explorer.py @@ -119,14 +119,32 @@ class _ArgumentEditor(QtGui.QTreeWidget): def __init__(self, dialog_parent): QtGui.QTreeWidget.__init__(self) self.setColumnCount(2) - self.header().setResizeMode( - QtGui.QHeaderView.ResizeToContents) + self.header().setResizeMode(QtGui.QHeaderView.ResizeToContents) self.header().setVisible(False) self.setSelectionMode(QtGui.QAbstractItemView.NoSelection) self.dialog_parent = dialog_parent + self._groups = dict() self.set_arguments([]) + def clear(self): + QtGui.QTreeWidget.clear(self) + self._groups.clear() + + def _get_group(self, name): + if name in self._groups: + return self._groups[name] + group = QtGui.QTreeWidgetItem([name, ""]) + for c in 0, 1: + group.setBackground(c, QtGui.QBrush(QtGui.QColor(100, 100, 100))) + group.setForeground(c, QtGui.QBrush(QtGui.QColor(220, 220, 255))) + font = group.font(c) + font.setBold(True) + group.setFont(c, font) + self.addTopLevelItem(group) + self._groups[name] = group + return group + def set_arguments(self, arguments): self.clear() @@ -134,12 +152,15 @@ class _ArgumentEditor(QtGui.QTreeWidget): self.addTopLevelItem(QtGui.QTreeWidgetItem(["No arguments", ""])) self._args_to_entries = dict() - for n, (name, procdesc) in enumerate(arguments): + for n, (name, (procdesc, group)) in enumerate(arguments): entry = _procty_to_entry[procdesc["ty"]](procdesc) self._args_to_entries[name] = entry widget_item = QtGui.QTreeWidgetItem([name, ""]) - self.addTopLevelItem(widget_item) + if group is None: + self.addTopLevelItem(widget_item) + else: + self._get_group(group).addChild(widget_item) self.setItemWidget(widget_item, 1, entry) def get_argument_values(self, show_error_message): @@ -167,6 +188,25 @@ class _ArgumentEditor(QtGui.QTreeWidget): if not ignore_errors: raise + def save_state(self): + expanded = [] + for k, v in self._groups.items(): + if v.isExpanded(): + expanded.append(k) + argument_values = self.get_argument_values(False) + return { + "expanded": expanded, + "argument_values": argument_values + } + + def restore_state(self, state): + self.set_argument_values(state["argument_values"], True) + for e in state["expanded"]: + try: + self._groups[e].setExpanded(True) + except KeyError: + pass + class ExplorerDock(dockarea.Dock): def __init__(self, dialog_parent, status_bar, schedule_ctl): @@ -220,15 +260,13 @@ class ExplorerDock(dockarea.Dock): def update_selection(self, selected, deselected): if deselected: - self.state[deselected] = self.argeditor.get_argument_values(False) + self.state[deselected] = self.argeditor.save_state() if selected: expinfo = self.explist_model.backing_store[selected] self.argeditor.set_arguments(expinfo["arguments"]) if selected in self.state: - arguments = 
self.state[selected] - if arguments is not None: - self.argeditor.set_argument_values(arguments, True) + self.argeditor.restore_state(self.state[selected]) self.splitter.insertWidget(1, self.argeditor) self.selected_key = selected @@ -249,7 +287,7 @@ class ExplorerDock(dockarea.Dock): if idx: row = idx[0].row() key = self.explist_model.row_to_key[row] - self.state[key] = self.argeditor.get_argument_values(False) + self.state[key] = self.argeditor.save_state() return self.state def restore_state(self, state): diff --git a/artiq/language/environment.py b/artiq/language/environment.py index 8a90f51cf..c75bd7d4e 100644 --- a/artiq/language/environment.py +++ b/artiq/language/environment.py @@ -144,19 +144,21 @@ class HasEnvironment: """ return self.__dmgr, self.__pdb, self.__rdb - def get_argument(self, key, processor=None): + def get_argument(self, key, processor=None, group=None): """Retrieves and returns the value of an argument. :param key: Name of the argument. :param processor: A description of how to process the argument, such as instances of ``BooleanValue`` and ``NumberValue``. + :param group: An optional string that defines what group the argument + belongs to, for user interface purposes. """ if not self.__in_build: raise TypeError("get_argument() should only " "be called from build()") if processor is None: processor = FreeValue() - self.requested_args[key] = processor + self.requested_args[key] = processor, group try: argval = self.__kwargs[key] except KeyError: @@ -169,10 +171,10 @@ class HasEnvironment: raise return processor.process(argval) - def attr_argument(self, key, processor=None): + def attr_argument(self, key, processor=None, group=None): """Sets an argument as attribute. The names of the argument and of the attribute are the same.""" - setattr(self, key, self.get_argument(key, processor)) + setattr(self, key, self.get_argument(key, processor, group)) def get_device(self, key): """Creates and returns a device driver.""" diff --git a/artiq/master/worker_impl.py b/artiq/master/worker_impl.py index 6fea52513..f8ff39746 100644 --- a/artiq/master/worker_impl.py +++ b/artiq/master/worker_impl.py @@ -147,8 +147,8 @@ def examine(dmgr, pdb, rdb, file): if name[-1] == ".": name = name[:-1] exp_inst = exp_class(dmgr, pdb, rdb, default_arg_none=True) - arguments = [(k, v.describe()) - for k, v in exp_inst.requested_args.items()] + arguments = [(k, (proc.describe(), group)) + for k, (proc, group) in exp_inst.requested_args.items()] register_experiment(class_name, name, arguments) From 1ad8e65e429a90d2220c67e7917c9dbabcce1996 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Tue, 25 Aug 2015 00:03:36 +0800 Subject: [PATCH 145/191] language/environment: support parents --- artiq/language/environment.py | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/artiq/language/environment.py b/artiq/language/environment.py index c75bd7d4e..8ecfa0499 100644 --- a/artiq/language/environment.py +++ b/artiq/language/environment.py @@ -103,13 +103,14 @@ class StringValue(_SimpleArgProcessor): class HasEnvironment: """Provides methods to manage the environment of an experiment (devices, parameters, results, arguments).""" - def __init__(self, dmgr=None, pdb=None, rdb=None, *, + def __init__(self, dmgr=None, pdb=None, rdb=None, *, parent=None, param_override=dict(), default_arg_none=False, **kwargs): self.requested_args = OrderedDict() self.__dmgr = dmgr self.__pdb = pdb self.__rdb = rdb + self.__parent = parent self.__param_override = param_override 
self.__default_arg_none = default_arg_none @@ -156,6 +157,8 @@ class HasEnvironment: if not self.__in_build: raise TypeError("get_argument() should only " "be called from build()") + if self.__parent is not None and key not in self.__kwargs: + return self.__parent.get_argument(key, processor, group) if processor is None: processor = FreeValue() self.requested_args[key] = processor, group @@ -178,6 +181,8 @@ class HasEnvironment: def get_device(self, key): """Creates and returns a device driver.""" + if self.__parent is not None: + return self.__parent.get_device(key) if self.__dmgr is None: raise ValueError("Device manager not present") return self.__dmgr.get(key) @@ -189,6 +194,8 @@ class HasEnvironment: def get_parameter(self, key, default=NoDefault): """Retrieves and returns a parameter.""" + if self.__parent is not None and key not in self.__param_override: + return self.__parent.get_parameter(key, default) if self.__pdb is None: raise ValueError("Parameter database not present") if key in self.__param_override: @@ -208,6 +215,8 @@ class HasEnvironment: def set_parameter(self, key, value): """Writes the value of a parameter into the parameter database.""" + if self.__parent is not None: + self.__parent.set_parameter(key, value) if self.__pdb is None: raise ValueError("Parameter database not present") self.__pdb.set(key, value) @@ -222,6 +231,8 @@ class HasEnvironment: :param store: Defines if the result should be stored permanently, e.g. in HDF5 output. Default is to store. """ + if self.__parent is not None: + self.__parent.set_result(key, value, realtime, store) if self.__rdb is None: raise ValueError("Result database not present") if realtime: @@ -244,6 +255,8 @@ class HasEnvironment: There is no difference between real-time and non-real-time results (this function does not return ``Notifier`` instances). 
""" + if self.__parent is not None: + return self.__parent.get_result(key) if self.__rdb is None: raise ValueError("Result database not present") return self.__rdb.get(key) From b7120aa0acc84992fde1d4dc8a2ae4db15058d8e Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Tue, 25 Aug 2015 00:07:37 +0800 Subject: [PATCH 146/191] examples/arguments_demo: demonstrate parents --- examples/master/repository/arguments_demo.py | 42 ++++++++++++++++++-- 1 file changed, 39 insertions(+), 3 deletions(-) diff --git a/examples/master/repository/arguments_demo.py b/examples/master/repository/arguments_demo.py index 97393df19..d956071b3 100644 --- a/examples/master/repository/arguments_demo.py +++ b/examples/master/repository/arguments_demo.py @@ -1,15 +1,49 @@ from artiq import * +class SubComponent1(HasEnvironment): + def build(self): + self.attr_argument("sc1_scan", Scannable(default=NoScan(325)), + "Flux capacitor") + self.attr_argument("sc1_enum", EnumerationValue(["1", "2", "3"]), + "Flux capacitor") + + def do(self): + print("SC1:") + for i in self.sc1_scan: + print(i) + print(self.sc1_enum) + + +class SubComponent2(HasEnvironment): + def build(self): + self.attr_argument("sc2_boolean", BooleanValue(False), + "Transporter") + self.attr_argument("sc2_scan", Scannable(default=NoScan(325)), + "Transporter") + self.attr_argument("sc2_enum", EnumerationValue(["3", "4", "5"]), + "Transporter") + + def do(self): + print("SC2:") + print(self.sc2_boolean) + for i in self.sc2_scan: + print(i) + print(self.sc2_enum) + + class ArgumentsDemo(EnvExperiment): def build(self): self.attr_argument("free_value", FreeValue(None)) - self.attr_argument("boolean", BooleanValue(True)) - self.attr_argument("enum", EnumerationValue( - ["foo", "bar", "quux"], "foo")) self.attr_argument("number", NumberValue(42, unit="s", step=0.1)) self.attr_argument("string", StringValue("Hello World")) self.attr_argument("scan", Scannable(global_max=400, default=NoScan(325))) + self.attr_argument("boolean", BooleanValue(True), "Group") + self.attr_argument("enum", EnumerationValue( + ["foo", "bar", "quux"], "foo"), "Group") + + self.sc1 = SubComponent1(parent=self) + self.sc2 = SubComponent2(parent=self) def run(self): print(self.free_value) @@ -19,3 +53,5 @@ class ArgumentsDemo(EnvExperiment): print(self.string) for i in self.scan: print(i) + self.sc1.do() + self.sc2.do() From 1c96f258b115696d03484f17e6f8d04f5fe80753 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Tue, 25 Aug 2015 00:37:49 +0800 Subject: [PATCH 147/191] gui: show server in window title --- artiq/frontend/artiq_gui.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/artiq/frontend/artiq_gui.py b/artiq/frontend/artiq_gui.py index 3811ce6b2..f6fbf7ebb 100755 --- a/artiq/frontend/artiq_gui.py +++ b/artiq/frontend/artiq_gui.py @@ -45,10 +45,10 @@ def get_argparser(): class MainWindow(QtGui.QMainWindow): - def __init__(self, app): + def __init__(self, app, server): QtGui.QMainWindow.__init__(self) self.setWindowIcon(QtGui.QIcon(os.path.join(data_dir, "icon.png"))) - self.setWindowTitle("ARTIQ") + self.setWindowTitle("ARTIQ - {}".format(server)) self.exit_request = asyncio.Event() def closeEvent(self, *args): @@ -77,7 +77,7 @@ def main(): args.server, args.port_control, "master_schedule")) atexit.register(lambda: schedule_ctl.close_rpc()) - win = MainWindow(app) + win = MainWindow(app, args.server) area = dockarea.DockArea() smgr.register(area) smgr.register(win) From 718de9888b3a64991de781e86490a0db3b7f6779 Mon Sep 17 00:00:00 2001 
From: Sebastien Bourdeauducq Date: Tue, 25 Aug 2015 00:38:03 +0800 Subject: [PATCH 148/191] gui: disable parameter selection --- artiq/gui/parameters.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/artiq/gui/parameters.py b/artiq/gui/parameters.py index 22f2addbd..4bc53b927 100644 --- a/artiq/gui/parameters.py +++ b/artiq/gui/parameters.py @@ -38,7 +38,7 @@ class ParametersDock(dockarea.Dock): grid.addWidget(self.search, 0, 0) self.table = QtGui.QTableView() - self.table.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows) + self.table.setSelectionMode(QtGui.QAbstractItemView.NoSelection) self.table.horizontalHeader().setResizeMode( QtGui.QHeaderView.ResizeToContents) grid.addWidget(self.table, 1, 0) From e04317912068797364b9bacf3ab75f94706f99aa Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Tue, 25 Aug 2015 00:39:03 +0800 Subject: [PATCH 149/191] language,gui: support ndecimals in scan and number arguments --- artiq/gui/explorer.py | 1 + artiq/gui/scan.py | 9 ++++--- artiq/language/environment.py | 5 +++- artiq/language/scan.py | 26 +++++++++++--------- examples/master/repository/arguments_demo.py | 7 ++++-- 5 files changed, 31 insertions(+), 17 deletions(-) diff --git a/artiq/gui/explorer.py b/artiq/gui/explorer.py index 81b491d52..c4f5ea680 100644 --- a/artiq/gui/explorer.py +++ b/artiq/gui/explorer.py @@ -74,6 +74,7 @@ class _EnumerationEntry(QtGui.QComboBox): class _NumberEntry(QtGui.QDoubleSpinBox): def __init__(self, procdesc): QtGui.QDoubleSpinBox.__init__(self) + self.setDecimals(procdesc["ndecimals"]) if procdesc["step"] is not None: self.setSingleStep(procdesc["step"]) if procdesc["min"] is not None: diff --git a/artiq/gui/scan.py b/artiq/gui/scan.py index ea34cf8d3..65d81ae52 100644 --- a/artiq/gui/scan.py +++ b/artiq/gui/scan.py @@ -5,10 +5,11 @@ from artiq.gui.tools import force_spinbox_value class _Range(LayoutWidget): - def __init__(self, global_min, global_max, global_step, unit): + def __init__(self, global_min, global_max, global_step, unit, ndecimals): LayoutWidget.__init__(self) def apply_properties(spinbox): + spinbox.setDecimals(ndecimals) if global_min is not None: spinbox.setMinimum(global_min) if global_max is not None: @@ -61,8 +62,10 @@ class ScanController(LayoutWidget): gmin, gmax = procdesc["global_min"], procdesc["global_max"] gstep = procdesc["global_step"] unit = procdesc["unit"] + ndecimals = procdesc["ndecimals"] self.v_noscan = QtGui.QDoubleSpinBox() + self.v_noscan.setDecimals(ndecimals) if gmin is not None: self.v_noscan.setMinimum(gmin) if gmax is not None: @@ -76,10 +79,10 @@ class ScanController(LayoutWidget): self.v_noscan_gr.addWidget(self.v_noscan, 0, 1) self.stack.addWidget(self.v_noscan_gr) - self.v_linear = _Range(gmin, gmax, gstep, unit) + self.v_linear = _Range(gmin, gmax, gstep, unit, ndecimals) self.stack.addWidget(self.v_linear) - self.v_random = _Range(gmin, gmax, gstep, unit) + self.v_random = _Range(gmin, gmax, gstep, unit, ndecimals) self.stack.addWidget(self.v_random) self.v_explicit = QtGui.QLineEdit() diff --git a/artiq/language/environment.py b/artiq/language/environment.py index 8ecfa0499..fa3f30849 100644 --- a/artiq/language/environment.py +++ b/artiq/language/environment.py @@ -77,14 +77,16 @@ class NumberValue(_SimpleArgProcessor): buttons in a UI. :param min: The minimum value of the argument. :param max: The maximum value of the argument. + :param ndecimals: The number of decimals a UI should use. 
""" def __init__(self, default=NoDefault, unit="", step=None, - min=None, max=None): + min=None, max=None, ndecimals=2): _SimpleArgProcessor.__init__(self, default) self.unit = unit self.step = step self.min = min self.max = max + self.ndecimals = ndecimals def describe(self): d = _SimpleArgProcessor.describe(self) @@ -92,6 +94,7 @@ class NumberValue(_SimpleArgProcessor): d["step"] = self.step d["min"] = self.min d["max"] = self.max + d["ndecimals"] = self.ndecimals return d diff --git a/artiq/language/scan.py b/artiq/language/scan.py index 33e98eb4e..fe22c5a4f 100644 --- a/artiq/language/scan.py +++ b/artiq/language/scan.py @@ -117,16 +117,19 @@ class Scannable: :param global_step: The step with which the value should be modified by up/down buttons in a user interface. :param unit: A string representing the unit of the scanned variable, for user - interface purposes. + interface (UI) purposes. + :param ndecimals: The number of decimals a UI should use. """ - def __init__(self, global_min=None, global_max=None, global_step=None, - unit="", default=NoDefault): - self.global_min = global_min - self.global_max = global_max - self.global_step = global_step - self.unit = unit + def __init__(self, default=NoDefault, unit="", + global_step=None, global_min=None, global_max=None, + ndecimals=2): if default is not NoDefault: self.default_value = default + self.unit = unit + self.global_step = global_step + self.global_min = global_min + self.global_max = global_max + self.ndecimals = ndecimals def default(self): if not hasattr(self, "default_value"): @@ -143,10 +146,11 @@ class Scannable: def describe(self): d = {"ty": "Scannable"} - d["global_min"] = self.global_min - d["global_max"] = self.global_max - d["global_step"] = self.global_step - d["unit"] = self.unit if hasattr(self, "default_value"): d["default"] = self.default_value.describe() + d["unit"] = self.unit + d["global_step"] = self.global_step + d["global_min"] = self.global_min + d["global_max"] = self.global_max + d["ndecimals"] = self.ndecimals return d diff --git a/examples/master/repository/arguments_demo.py b/examples/master/repository/arguments_demo.py index d956071b3..08ef43ba2 100644 --- a/examples/master/repository/arguments_demo.py +++ b/examples/master/repository/arguments_demo.py @@ -35,9 +35,12 @@ class SubComponent2(HasEnvironment): class ArgumentsDemo(EnvExperiment): def build(self): self.attr_argument("free_value", FreeValue(None)) - self.attr_argument("number", NumberValue(42, unit="s", step=0.1)) + self.attr_argument("number", NumberValue(42, unit="s", step=0.1, + ndecimals=4)) self.attr_argument("string", StringValue("Hello World")) - self.attr_argument("scan", Scannable(global_max=400, default=NoScan(325))) + self.attr_argument("scan", Scannable(global_max=400, + default=NoScan(325), + ndecimals=6)) self.attr_argument("boolean", BooleanValue(True), "Group") self.attr_argument("enum", EnumerationValue( ["foo", "bar", "quux"], "foo"), "Group") From d5f2f5c062e77a2f09bb6ab44b2cd4dc9607ba3c Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Tue, 25 Aug 2015 00:56:19 +0800 Subject: [PATCH 150/191] gui: fix spinbox bounds --- artiq/gui/explorer.py | 8 ++++++-- artiq/gui/scan.py | 18 ++++++++++++------ artiq/gui/tools.py | 8 -------- 3 files changed, 18 insertions(+), 16 deletions(-) diff --git a/artiq/gui/explorer.py b/artiq/gui/explorer.py index c4f5ea680..c8925256b 100644 --- a/artiq/gui/explorer.py +++ b/artiq/gui/explorer.py @@ -6,7 +6,7 @@ from pyqtgraph import LayoutWidget from artiq.protocols.sync_struct 
import Subscriber from artiq.protocols import pyon -from artiq.gui.tools import DictSyncModel, force_spinbox_value +from artiq.gui.tools import DictSyncModel from artiq.gui.scan import ScanController @@ -79,8 +79,12 @@ class _NumberEntry(QtGui.QDoubleSpinBox): self.setSingleStep(procdesc["step"]) if procdesc["min"] is not None: self.setMinimum(procdesc["min"]) + else: + self.setMinimum(float("-inf")) if procdesc["max"] is not None: self.setMaximum(procdesc["max"]) + else: + self.setMaximum(float("inf")) if procdesc["unit"]: self.setSuffix(" " + procdesc["unit"]) if "default" in procdesc: @@ -90,7 +94,7 @@ class _NumberEntry(QtGui.QDoubleSpinBox): return self.value() def set_argument_value(self, value): - force_spinbox_value(self, value) + self.setValue(value) class _StringEntry(QtGui.QLineEdit): diff --git a/artiq/gui/scan.py b/artiq/gui/scan.py index 65d81ae52..f3405200b 100644 --- a/artiq/gui/scan.py +++ b/artiq/gui/scan.py @@ -1,8 +1,6 @@ from quamash import QtGui from pyqtgraph import LayoutWidget -from artiq.gui.tools import force_spinbox_value - class _Range(LayoutWidget): def __init__(self, global_min, global_max, global_step, unit, ndecimals): @@ -12,8 +10,12 @@ class _Range(LayoutWidget): spinbox.setDecimals(ndecimals) if global_min is not None: spinbox.setMinimum(global_min) + else: + spinbox.setMinimum(float("-inf")) if global_max is not None: spinbox.setMaximum(global_max) + else: + spinbox.setMaximum(float("inf")) if global_step is not None: spinbox.setSingleStep(global_step) if unit: @@ -36,9 +38,9 @@ class _Range(LayoutWidget): self.addWidget(self.npoints, 0, 5) def set_values(self, min, max, npoints): - force_spinbox_value(self.min, min) - force_spinbox_value(self.max, max) - force_spinbox_value(self.npoints, npoints) + self.min.setValue(min) + self.max.setValue(max) + self.npoints.setValue(npoints) def get_values(self): min = self.min.value() @@ -68,8 +70,12 @@ class ScanController(LayoutWidget): self.v_noscan.setDecimals(ndecimals) if gmin is not None: self.v_noscan.setMinimum(gmin) + else: + self.v_noscan.setMinimum(float("-inf")) if gmax is not None: self.v_noscan.setMaximum(gmax) + else: + self.v_noscan.setMaximum(float("inf")) if gstep is not None: self.v_noscan.setSingleStep(gstep) if unit: @@ -135,7 +141,7 @@ class ScanController(LayoutWidget): def set_argument_value(self, d): if d["ty"] == "NoScan": self.noscan.setChecked(True) - force_spinbox_value(self.v_noscan, d["value"]) + self.v_noscan.setValue(d["value"]) elif d["ty"] == "LinearScan": self.linear.setChecked(True) self.v_linear.set_values(d["min"], d["max"], d["npoints"]) diff --git a/artiq/gui/tools.py b/artiq/gui/tools.py index 04476b25d..ecce285ed 100644 --- a/artiq/gui/tools.py +++ b/artiq/gui/tools.py @@ -37,14 +37,6 @@ def short_format(v): return r -def force_spinbox_value(spinbox, value): - if spinbox.minimum() > value: - spinbox.setMinimum(value) - if spinbox.maximum() < value: - spinbox.setMaximum(value) - spinbox.setValue(value) - - class _SyncSubstruct: def __init__(self, update_cb, ref): self.update_cb = update_cb From b790fb093d953d3e36a1f1874f56d4e2543b7747 Mon Sep 17 00:00:00 2001 From: Joe Britton Date: Tue, 25 Aug 2015 22:01:51 -0600 Subject: [PATCH 151/191] add ping() to novatech driver --- artiq/devices/novatech409b/driver.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/artiq/devices/novatech409b/driver.py b/artiq/devices/novatech409b/driver.py index 5bea7164e..ffe971012 100644 --- a/artiq/devices/novatech409b/driver.py +++ b/artiq/devices/novatech409b/driver.py @@ -210,3 
+210,15 @@ class Novatech409B: result = [r.rstrip().decode() for r in result] logger.debug("got device status: %s", result) return result + + def ping(self): + try: + stat = self.get_status() + except: + return False + # check that version number matches is "21" + if stat[4][20:] == "21": + logger.debug("ping successful") + return True + else: + return False From 92390cfbe6dd5ce3218ba50b25b63c9e77829a9c Mon Sep 17 00:00:00 2001 From: Yann Sionneau Date: Wed, 26 Aug 2015 11:12:48 +0200 Subject: [PATCH 152/191] py2llvm: allow the unit test to run on Windows --- artiq/test/py2llvm.py | 3 ++- conda/llvmdev-or1k/bld.bat | 2 +- conda/llvmlite-artiq/meta.yaml | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/artiq/test/py2llvm.py b/artiq/test/py2llvm.py index 6026d3797..a56c7f410 100644 --- a/artiq/test/py2llvm.py +++ b/artiq/test/py2llvm.py @@ -4,6 +4,7 @@ import inspect from fractions import Fraction from ctypes import CFUNCTYPE, c_int, c_int32, c_int64, c_double import struct +import os import llvmlite_artiq.binding as llvm @@ -16,7 +17,7 @@ from artiq.py2llvm.module import Module llvm.initialize() llvm.initialize_native_target() llvm.initialize_native_asmprinter() -if struct.calcsize("P") < 8: +if struct.calcsize("P") < 8 and os.name != "nt": from ctypes import _dlopen, RTLD_GLOBAL _dlopen("libgcc_s.so", RTLD_GLOBAL) diff --git a/conda/llvmdev-or1k/bld.bat b/conda/llvmdev-or1k/bld.bat index 2c93638ee..654b44d64 100644 --- a/conda/llvmdev-or1k/bld.bat +++ b/conda/llvmdev-or1k/bld.bat @@ -9,7 +9,7 @@ set CMAKE_GENERATOR=Visual Studio 12 2013 Win64 ) set CMAKE_GENERATOR_TOOLSET=v120_xp @rem Reduce build times and package size by removing unused stuff -set CMAKE_CUSTOM=-DLLVM_TARGETS_TO_BUILD=OR1K -DLLVM_INCLUDE_TESTS=OFF ^ +set CMAKE_CUSTOM=-DLLVM_TARGETS_TO_BUILD="OR1K;X86" -DLLVM_INCLUDE_TESTS=OFF ^ -DLLVM_INCLUDE_TOOLS=OFF -DLLVM_INCLUDE_UTILS=OFF ^ -DLLVM_INCLUDE_DOCS=OFF -DLLVM_INCLUDE_EXAMPLES=OFF ^ -DLLVM_ENABLE_ASSERTIONS=ON diff --git a/conda/llvmlite-artiq/meta.yaml b/conda/llvmlite-artiq/meta.yaml index af5253bea..e22929410 100644 --- a/conda/llvmlite-artiq/meta.yaml +++ b/conda/llvmlite-artiq/meta.yaml @@ -15,7 +15,7 @@ requirements: - python build: - number: 2 + number: 3 test: imports: From 7b0b1d686013691fb5a492b3691715c6611cf2ab Mon Sep 17 00:00:00 2001 From: Yann Sionneau Date: Wed, 26 Aug 2015 14:22:13 +0200 Subject: [PATCH 153/191] manual: Windows (32 and 64-bit) users should install 32-bit miniconda/anaconda --- doc/manual/installing.rst | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/doc/manual/installing.rst b/doc/manual/installing.rst index 9d7523205..da2b6a893 100644 --- a/doc/manual/installing.rst +++ b/doc/manual/installing.rst @@ -15,7 +15,11 @@ Installing Anaconda or Miniconda * You can either install Anaconda (chose Python 3.4) from https://store.continuum.io/cshop/anaconda/ -* Or install the more minimalistic Miniconda (chose Python3.4) from http://conda.pydata.org/miniconda.html +* Or install the more minimalistic Miniconda (chose Python 3.4) from http://conda.pydata.org/miniconda.html + +.. warning:: + If you are installing on Windows, chose the Windows 32-bit version regardless of whether you have + a 32-bit or 64-bit Windows. 
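The pointer-size check used in the unit test above is a standard way to distinguish a 32-bit Python interpreter (such as the 32-bit Miniconda recommended for Windows here) from a 64-bit one; a standalone sketch of the idiom::

    import os
    import struct

    # struct.calcsize("P") is the size of a C pointer in bytes:
    # 4 on a 32-bit interpreter, 8 on a 64-bit one.
    is_32bit_python = struct.calcsize("P") < 8
    on_windows = os.name == "nt"

    # Mirrors the guard in the test: libgcc_s is only preloaded on
    # 32-bit non-Windows hosts.
    needs_libgcc_preload = is_32bit_python and not on_windows
    print(is_32bit_python, on_windows, needs_libgcc_preload)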
After installing either Anaconda or Miniconda, open a new terminal and make sure the following command works:: From 947acb1f90ae1b8862efb489a9cc29f7d4e0c645 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Wed, 26 Aug 2015 20:23:50 +0800 Subject: [PATCH 154/191] manual: management system tutorial, Git integration --- ...g_started.rst => getting_started_core.rst} | 6 +- doc/manual/getting_started_mgmt.rst | 138 ++++++++++++++++++ doc/manual/index.rst | 5 +- 3 files changed, 144 insertions(+), 5 deletions(-) rename doc/manual/{getting_started.rst => getting_started_core.rst} (90%) create mode 100644 doc/manual/getting_started_mgmt.rst diff --git a/doc/manual/getting_started.rst b/doc/manual/getting_started_core.rst similarity index 90% rename from doc/manual/getting_started.rst rename to doc/manual/getting_started_core.rst index 3e8f9f557..f5a05a5e0 100644 --- a/doc/manual/getting_started.rst +++ b/doc/manual/getting_started_core.rst @@ -1,5 +1,5 @@ -Getting started -=============== +Getting started with the core language +====================================== .. _connecting-to-the-core-device: @@ -23,7 +23,7 @@ As a very first step, we will turn on a LED on the core device. Create a file `` The central part of our code is our ``LED`` class, that derives from :class:`artiq.language.environment.EnvExperiment`. Among other features, ``EnvExperiment`` calls our ``build`` method and provides the ``attr_device`` method that interfaces to the device database to create the appropriate device drivers and make those drivers accessible as ``self.core`` and ``self.led``. The ``@kernel`` decorator tells the system that the ``run`` method must be executed on the core device (instead of the host). The decorator uses ``self.core`` internally, which is why we request the core device using ``attr_device`` like any other. -Copy the files ``ddb.pyon`` and ``pdb.pyon`` (containing the device and parameter databases) from the ``examples`` folder of ARTIQ into the same directory as ``led.py`` (alternatively, you can use the ``-d`` and ``-p`` options of ``artiq_run.py``). You can open the database files using a text editor - their contents are in a human-readable format. You will probably want to set the IP address of the core device in ``ddb.pyon`` so that the computer can connect to it (it is the ``host`` parameter of the ``comm`` entry). See :ref:`ddb` for more information. The example device database is designed for the NIST QC1 hardware on the KC705; see :ref:`board-ports` for RTIO channel assignments if you need to adapt the device database to a different hardware platform. +Copy the files ``ddb.pyon`` and ``pdb.pyon`` (containing the device and parameter databases) from the ``examples/master`` folder of ARTIQ into the same directory as ``led.py`` (alternatively, you can use the ``-d`` and ``-p`` options of ``artiq_run``). You can open the database files using a text editor - their contents are in a human-readable format. You will probably want to set the IP address of the core device in ``ddb.pyon`` so that the computer can connect to it (it is the ``host`` parameter of the ``comm`` entry). See :ref:`ddb` for more information. The example device database is designed for the NIST QC1 hardware on the KC705; see :ref:`board-ports` for RTIO channel assignments if you need to adapt the device database to a different hardware platform. 
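For reference, the relevant part of ``ddb.pyon`` looks roughly like the sketch below. This is an illustration only: the ``type``, ``module`` and ``class`` values come from the example database and may differ between ARTIQ versions, and the IP address is a placeholder — only the ``host`` value needs to be edited to match your core device: ::

    "comm": {
        "type": "local",
        "module": "artiq.coredevice.comm_tcp",
        "class": "Comm",
        "arguments": {"host": "192.168.1.42"}
    },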
 Run your code using ``artiq_run``, which is part of the ARTIQ front-end tools: ::
 
diff --git a/doc/manual/getting_started_mgmt.rst b/doc/manual/getting_started_mgmt.rst
new file mode 100644
index 000000000..53db876b3
--- /dev/null
+++ b/doc/manual/getting_started_mgmt.rst
@@ -0,0 +1,138 @@
+Getting started with the management system
+==========================================
+
+The management system is the high-level part of ARTIQ that schedules the experiments, distributes and stores the results, and manages devices and parameters.
+
+The manipulations described in this tutorial can be carried out using a single computer, without any special hardware.
+
+Starting your first experiment with the master
+----------------------------------------------
+
+In the previous tutorial, we used the ``artiq_run`` utility to execute our experiments, which is a simple stand-alone tool that bypasses the ARTIQ management system. We will now see how to run an experiment using the master (the central program in the management system that schedules and executes experiments) and the GUI client (that connects to the master and controls it).
+
+First, create a folder ``~/artiq-master`` and copy the ``ddb.pyon`` and ``pdb.pyon`` files (device and parameter databases) found in the ``examples/master`` directory from the ARTIQ sources. The master uses those files in the same way as ``artiq_run``.
+
+Then create a ``~/artiq-master/repository`` sub-folder to contain experiments. The master scans this ``repository`` folder to determine what experiments are available (the name of the folder can be changed using ``-r``).
+
+Create a very simple experiment in ``~/artiq-master/repository`` and save it as ``mgmt_tutorial.py``: ::
+
+    from artiq import *
+
+
+    class MgmtTutorial(EnvExperiment):
+        """Management tutorial"""
+        def build(self):
+            pass  # no devices used
+
+        def run(self):
+            print("Hello World")
+
+
+Start the master with: ::
+
+    $ cd ~/artiq-master
+    $ artiq_master
+
+This last command should not return, as the master keeps running.
+
+Now, start the GUI client with the following commands in another terminal: ::
+
+    $ cd ~
+    $ artiq_gui
+
+.. note:: The ``artiq_gui`` program uses a file called ``artiq_gui.pyon`` in the current directory to save and restore the GUI state (window/dock positions, last values entered by the user, etc.).
+
+The GUI should display the list of experiments from the repository folder in a dock called "Explorer". There should be only the experiment we created. Select it and click "Submit", then look at the "Log" dock for the output from this simple experiment.
+
+.. note:: Multiple clients may be connected at the same time, possibly on different machines, and will be synchronized. See the ``-s`` option of ``artiq_gui`` and the ``--bind`` option of ``artiq_master`` to use the network. Both IPv4 and IPv6 are supported.
+
+Adding an argument
+------------------
+
+Experiments may have arguments whose values can be set in the GUI and used in the experiment's code. Modify the experiment as follows: ::
+
+
+    def build(self):
+        self.attr_argument("count", NumberValue(ndecimals=0))
+
+    def run(self):
+        for i in range(int(self.count)):
+            print("Hello World", i)
+
+
+``NumberValue`` represents a floating point numeric argument. There are many other types, see :mod:`artiq.language.environment` and :mod:`artiq.language.scan`.
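As a sketch only — assuming the ``StringValue`` and ``BooleanValue`` argument types from ``artiq.language.environment``, with hypothetical argument names ``message`` and ``shout`` — several arguments can be requested side by side and used like regular attributes: ::

    def build(self):
        self.attr_argument("count", NumberValue(ndecimals=0))
        self.attr_argument("message", StringValue("Hello World"))
        self.attr_argument("shout", BooleanValue(False))

    def run(self):
        # attr_argument makes each value available as an attribute
        text = self.message.upper() if self.shout else self.message
        for i in range(int(self.count)):
            print(text, i)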
+
+Use the command-line client to trigger a repository rescan: ::
+
+    artiq_client scan-repository
+
+The GUI should now display a spin box that allows you to set the value of the ``count`` argument. Try submitting the experiment as before.
+
+Setting up Git integration
+--------------------------
+
+So far, we have used the bare filesystem for the experiment repository, without any version control. Using Git to host the experiment repository helps with the tracking of modifications to experiments and with the traceability of a result to a particular version of an experiment.
+
+.. note:: The workflow we will describe in this tutorial corresponds to a situation where the ARTIQ master machine is also used as a Git server where multiple users may push and pull code. The Git setup can be customized according to your needs; the main point to remember is that when scanning or submitting, the ARTIQ master uses the internal Git data (*not* any working directory that may be present) to fetch the latest *fully completed commit* at the repository's head.
+
+We will use the current ``repository`` folder as the working directory for making local modifications to the experiments, move it away from the master data directory, and create a new ``repository`` folder that holds the Git data used by the master. Stop the master with Ctrl-C and enter the following commands: ::
+
+    $ cd ~/artiq-master
+    $ mv repository ~/artiq-work
+    $ mkdir repository
+    $ cd repository
+    $ git init --bare
+
+Start the master again with the ``-g`` flag, telling it to treat the contents of the ``repository`` folder as a bare Git repository: ::
+
+    $ cd ~/artiq-master
+    $ artiq_master -g
+
+There should be no errors displayed, and if you start the GUI again you should notice an empty experiment list. We will now add our previously written experiment to it.
+
+First, another small configuration step is needed. We must tell Git to make the master rescan the repository when new data is added to it. Create a file ``~/artiq-master/repository/hooks/post-receive`` with the following contents: ::
+
+    #!/bin/sh
+    artiq_client scan-repository
+
+Then set the execution permission on it: ::
+
+    $ chmod 755 ~/artiq-master/repository/hooks/post-receive
+
+The setup on the master side is now complete. All we need to do now is push data into the bare repository. Initialize a regular (non-bare) Git repository in our working directory: ::
+
+    $ cd ~/artiq-work
+    $ git init
+
+Then commit our experiment: ::
+
+    $ git add mgmt_tutorial.py
+    $ git commit -m "First version of the tutorial experiment"
+
+and finally, push the commit into the master's bare repository: ::
+
+    $ git remote add origin ~/artiq-master/repository
+    $ git push -u origin master
+
+The GUI should immediately list the experiment again, and you should be able to submit it as before.
+
+.. note:: Remote machines may also push and pull into the master's bare repository using e.g. Git over SSH.
+
+Let's now make a modification to the experiment. In the source present in the working directory, add an exclamation mark at the end of "Hello World". Before committing it, check that the experiment can still be executed correctly by running it directly from the filesystem using: ::
+
+    $ artiq_client submit ~/artiq-work/mgmt_tutorial.py
+
+.. note:: Submitting experiments outside the repository from the GUI is currently not supported. Submitting an experiment from the repository using the ``artiq_client`` command-line tool is done using the ``-R`` flag.
+
+Verify the log in the GUI.
If you are happy with the result, commit the new version and push it into the master's repository: :: + + $ cd ~/artiq-work + $ git commit -a -m "More enthusiasm" + $ git push + +.. note:: Notice that commands other than ``git push`` are not needed anymore. + +The master should now run the new version from its repository. + +As an exercise, add another argument to the experiment, commit and push the result, and verify that the new control is added in the GUI. + diff --git a/doc/manual/index.rst b/doc/manual/index.rst index af51eef69..f13e79a3e 100644 --- a/doc/manual/index.rst +++ b/doc/manual/index.rst @@ -8,10 +8,11 @@ Contents: introduction installing - getting_started + getting_started_core + getting_started_mgmt + core_device management_system environment - core_device core_language_reference core_drivers_reference protocols_reference From 2c893ba0a2eb5f6c8632f2f4063d5f6853f36e21 Mon Sep 17 00:00:00 2001 From: Sebastien Bourdeauducq Date: Wed, 26 Aug 2015 20:40:37 +0800 Subject: [PATCH 155/191] doc/manual: add results to mgmt tutorial --- doc/manual/getting_started_mgmt.rst | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/doc/manual/getting_started_mgmt.rst b/doc/manual/getting_started_mgmt.rst index 53db876b3..1ff2e7fed 100644 --- a/doc/manual/getting_started_mgmt.rst +++ b/doc/manual/getting_started_mgmt.rst @@ -136,3 +136,21 @@ The master should now run the new version from its repository. As an exercise, add another argument to the experiment, commit and push the result, and verify that the new control is added in the GUI. +Results +------- + +Modify the ``run()`` method of the experiment as follows: :: + + def run(self): + parabola = self.set_result("parabola", [], realtime=True) + for i in range(int(self.count)): + parabola.append(i*i) + time.sleep(0.5) + +.. note:: You need to import the ``time`` module. + +Commit, push and submit the experiment as before. While it is running, go to the "Results" dock of the GUI and create a new XY plot showing the new result. Observe how the points are added one by one to the plot. + +After the experiment has finished executing, the results are written to a HDF5 file that resides in ``~/artiq-master/results//