mirror of https://github.com/m-labs/artiq.git
Merge branch 'master' into new-py2llvm
commit e9adfd639e
@@ -19,10 +19,11 @@ do
 done
 
 export PATH=$PWD/packages/usr/local/llvm-or1k/bin:$PWD/packages/usr/local/bin:$PWD/packages/usr/bin:$PATH
-export LD_LIBRARY_PATH=$PWD/packages/usr/local/llvm-or1k/lib:$PWD/packages/usr/lib/x86_64-linux-gnu:$PWD/packages/usr/local/x86_64-unknown-linux-gnu/or1k-elf/lib:$LD_LIBRARY_PATH
+export LD_LIBRARY_PATH=$PWD/packages/usr/lib/x86_64-linux-gnu:$PWD/packages/usr/local/x86_64-unknown-linux-gnu/or1k-elf/lib:$LD_LIBRARY_PATH
 
+echo "export LD_LIBRARY_PATH=$PWD/packages/usr/lib/x86_64-linux-gnu:$PWD/packages/usr/local/x86_64-unknown-linux-gnu/or1k-elf/lib:\$LD_LIBRARY_PATH" >> $HOME/.mlabs/build_settings.sh
 echo "export PATH=$PWD/packages/usr/local/llvm-or1k/bin:$PWD/packages/usr/local/bin:$PWD/packages/usr/bin:\$PATH" >> $HOME/.mlabs/build_settings.sh
-echo "export LD_LIBRARY_PATH=$PWD/packages/usr/local/llvm-or1k/lib:$PWD/packages/usr/lib/x86_64-linux-gnu:$PWD/packages/usr/local/x86_64-unknown-linux-gnu/or1k-elf/lib:\$LD_LIBRARY_PATH" >> $HOME/.mlabs/build_settings.sh
 
+or1k-linux-as --version
 llc --version
 clang --version
@@ -224,18 +224,22 @@ def _show_dict(args, notifier_name, display_fun):
     _run_subscriber(args.server, args.port, subscriber)
 
 
+def _print_log_record(record):
+    level, source, t, message = record
+    t = time.strftime("%m/%d %H:%M:%S", time.localtime(t))
+    print(level, source, t, message)
+
+
 class _LogPrinter:
     def __init__(self, init):
-        for rid, msg in init:
-            print(rid, msg)
+        for record in init:
+            _print_log_record(record)
 
-    def append(self, x):
-        rid, msg = x
-        print(rid, msg)
+    def append(self, record):
+        _print_log_record(record)
 
-    def insert(self, i, x):
-        rid, msg = x
-        print(rid, msg)
+    def insert(self, i, record):
+        _print_log_record(record)
 
     def pop(self, i=-1):
         pass
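With this change the client receives log entries as (level, source, time, message) tuples instead of (rid, msg) pairs. A minimal sketch of how one record is rendered; the sample values are invented for illustration:

    import logging
    import time

    def print_log_record(record):
        # same layout as the tuples produced by the master's LogBuffer
        level, source, t, message = record
        t = time.strftime("%m/%d %H:%M:%S", time.localtime(t))
        print(level, source, t, message)

    # hypothetical record; the numeric level 30 is logging.WARNING
    print_log_record((logging.WARNING, "master", time.time(), "scanning repository"))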
@@ -6,16 +6,17 @@ import atexit
 import os
 
 from artiq.protocols.pc_rpc import Server
-from artiq.protocols.sync_struct import Notifier, Publisher
+from artiq.protocols.sync_struct import Publisher
+from artiq.master.log import log_args, init_log
 from artiq.master.databases import DeviceDB, DatasetDB
 from artiq.master.scheduler import Scheduler
 from artiq.master.worker_db import get_last_rid
 from artiq.master.repository import FilesystemBackend, GitBackend, Repository
-from artiq.tools import verbosity_args, init_logger
 
 
 def get_argparser():
     parser = argparse.ArgumentParser(description="ARTIQ master")
 
     group = parser.add_argument_group("network")
     group.add_argument(
         "--bind", default="::1",

@@ -26,11 +27,13 @@ def get_argparser():
     group.add_argument(
         "--port-control", default=3251, type=int,
         help="TCP port to listen to for control (default: %(default)d)")
 
     group = parser.add_argument_group("databases")
     group.add_argument("--device-db", default="device_db.pyon",
                        help="device database file (default: '%(default)s')")
     group.add_argument("--dataset-db", default="dataset_db.pyon",
                        help="dataset file (default: '%(default)s')")
 
     group = parser.add_argument_group("repository")
     group.add_argument(
         "-g", "--git", default=False, action="store_true",

@@ -38,25 +41,15 @@ def get_argparser():
     group.add_argument(
         "-r", "--repository", default="repository",
         help="path to the repository (default: '%(default)s')")
-    verbosity_args(parser)
+    log_args(parser)
 
     return parser
 
 
-class Log:
-    def __init__(self, depth):
-        self.depth = depth
-        self.data = Notifier([])
-
-    def log(self, rid, message):
-        if len(self.data.read) >= self.depth:
-            del self.data[0]
-        self.data.append((rid, message))
-    log.worker_pass_rid = True
-
-
 def main():
     args = get_argparser().parse_args()
-    init_logger(args)
+    log_buffer, log_forwarder = init_log(args)
     if os.name == "nt":
         loop = asyncio.ProactorEventLoop()
         asyncio.set_event_loop(loop)

@@ -68,13 +61,13 @@ def main():
     dataset_db = DatasetDB(args.dataset_db)
     dataset_db.start()
     atexit.register(lambda: loop.run_until_complete(dataset_db.stop()))
-    log = Log(1000)
 
     if args.git:
         repo_backend = GitBackend(args.repository)
     else:
         repo_backend = FilesystemBackend(args.repository)
-    repository = Repository(repo_backend, device_db.get_device_db, log.log)
+    repository = Repository(repo_backend, device_db.get_device_db,
+                            log_forwarder.log_worker)
     atexit.register(repository.close)
     repository.scan_async()
 

@@ -83,7 +76,7 @@ def main():
         "get_device": device_db.get,
         "get_dataset": dataset_db.get,
         "update_dataset": dataset_db.update,
-        "log": log.log
+        "log": log_forwarder.log_worker
     }
     scheduler = Scheduler(get_last_rid() + 1, worker_handlers, repo_backend)
     worker_handlers["scheduler_submit"] = scheduler.submit

@@ -105,7 +98,7 @@ def main():
         "devices": device_db.data,
         "datasets": dataset_db.data,
         "explist": repository.explist,
-        "log": log.data
+        "log": log_buffer.data
     })
     loop.run_until_complete(server_notify.start(
         args.bind, args.port_notify))
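After the merge, the master no longer keeps its own Log class: init_log (from the artiq.master.log module added below) installs the handlers and returns the buffer published to clients together with the forwarder handed to workers. A rough sketch of that wiring, assuming the names from this diff and a stand-in for the parsed arguments:

    import logging

    from artiq.master.log import init_log

    class _Args:
        # stand-in for the parsed command-line arguments init_log expects
        verbose = 0
        quiet = 0

    log_buffer, log_forwarder = init_log(_Args())

    # anything logged in the master process now lands in log_buffer.data,
    # the Notifier that the Publisher exposes to clients as "log"
    logging.warning("repository scan failed")
    print(log_buffer.data.read[-1])
    # -> (30, 'master', <timestamp>, 'root:repository scan failed')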
@@ -1,4 +1,6 @@
 import asyncio
+import logging
+import time
 
 from quamash import QtGui, QtCore
 from pyqtgraph import dockarea

@@ -7,23 +9,61 @@ from artiq.protocols.sync_struct import Subscriber
 from artiq.gui.tools import ListSyncModel
 
 
+def _level_to_name(level):
+    if level >= logging.CRITICAL:
+        return "CRITICAL"
+    if level >= logging.ERROR:
+        return "ERROR"
+    if level >= logging.WARNING:
+        return "WARNING"
+    if level >= logging.INFO:
+        return "INFO"
+    return "DEBUG"
+
+
 class _LogModel(ListSyncModel):
     def __init__(self, parent, init):
         ListSyncModel.__init__(self,
-            ["RID", "Message"],
+            ["Level", "Source", "Time", "Message"],
             parent, init)
         self.fixed_font = QtGui.QFont()
         self.fixed_font.setFamily("Monospace")
 
+        self.white = QtGui.QBrush(QtGui.QColor(255, 255, 255))
+        self.black = QtGui.QBrush(QtGui.QColor(0, 0, 0))
+        self.debug_fg = QtGui.QBrush(QtGui.QColor(55, 55, 55))
+        self.warning_bg = QtGui.QBrush(QtGui.QColor(255, 255, 180))
+        self.error_bg = QtGui.QBrush(QtGui.QColor(255, 150, 150))
+
     def data(self, index, role):
         if (role == QtCore.Qt.FontRole and index.isValid()
-                and index.column() == 1):
+                and index.column() == 3):
             return self.fixed_font
+        elif role == QtCore.Qt.BackgroundRole and index.isValid():
+            level = self.backing_store[index.row()][0]
+            if level >= logging.ERROR:
+                return self.error_bg
+            elif level >= logging.WARNING:
+                return self.warning_bg
+            else:
+                return self.white
+        elif role == QtCore.Qt.ForegroundRole and index.isValid():
+            level = self.backing_store[index.row()][0]
+            if level <= logging.DEBUG:
+                return self.debug_fg
+            else:
+                return self.black
         else:
             return ListSyncModel.data(self, index, role)
 
     def convert(self, v, column):
-        return v[column]
+        if column == 0:
+            return _level_to_name(v[0])
+        elif column == 1:
+            return v[1]
+        elif column == 2:
+            return time.strftime("%m/%d %H:%M:%S", time.localtime(v[2]))
+        else:
+            return v[3]
 
 
 class LogDock(dockarea.Dock):
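The GUI log model now has four columns and colors rows by severity. The thresholds in _level_to_name operate on numeric levels, so custom levels fall into the nearest named band; a small sketch of the intended behavior:

    import logging

    # mirrors the threshold cascade of _level_to_name from the diff above
    thresholds = [(logging.CRITICAL, "CRITICAL"), (logging.ERROR, "ERROR"),
                  (logging.WARNING, "WARNING"), (logging.INFO, "INFO")]

    def level_to_name(level):
        for threshold, name in thresholds:
            if level >= threshold:
                return name
        return "DEBUG"

    print(level_to_name(logging.ERROR))  # ERROR -> red background in the dock
    print(level_to_name(25))             # INFO: between INFO (20) and WARNING (30)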
@@ -207,6 +207,14 @@ class HasEnvironment:
 
     def set_dataset(self, key, value,
                     broadcast=False, persist=False, save=True):
+        """Sets the contents and handling modes of a dataset.
+
+        :param broadcast: the data is sent in real-time to the master, which
+            dispatches it. Returns a Notifier that can be used to mutate the dataset.
+        :param persist: the master should store the data on-disk. Implies broadcast.
+        :param save: the data is saved into the local storage of the current
+            run (archived as a HDF5 file).
+        """
         if self.__parent is not None:
             self.__parent.set_dataset(key, value, broadcast, persist, save)
             return

@@ -215,6 +223,15 @@ class HasEnvironment:
         return self.__dataset_mgr.set(key, value, broadcast, persist, save)
 
     def get_dataset(self, key, default=NoDefault):
+        """Returns the contents of a dataset.
+
+        The local storage is searched first, followed by the master storage
+        (which contains the broadcasted datasets from all experiments) if the
+        key was not found initially.
+
+        If the dataset does not exist, returns the default value. If no default
+        is provided, raises ``KeyError``.
+        """
         if self.__parent is not None:
             return self.__parent.get_dataset(key, default)
         if self.__dataset_mgr is None:

@@ -228,6 +245,8 @@ class HasEnvironment:
         return default
 
     def setattr_dataset(self, key, default=NoDefault):
+        """Sets the contents of a dataset as attribute. The names of the
+        dataset and of the attribute are the same."""
         setattr(self, key, self.get_dataset(key, default))
 
 
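The new docstrings describe three orthogonal handling modes. A short usage sketch against the signatures shown above; the experiment and key names are invented for illustration:

    from artiq import *

    class CalibrationDemo(EnvExperiment):
        def build(self):
            pass

        def run(self):
            # broadcast + persist: pushed to the master and kept in dataset_db.pyon
            self.set_dataset("freq_offset", 1.2e3, broadcast=True, persist=True)
            # save only (the default): archived in this run's HDF5 results
            self.set_dataset("samples", [0, 1, 4, 9])
            # falls back to the default if no experiment has set "gain" yet
            gain = self.get_dataset("gain", 1.0)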
@@ -0,0 +1,105 @@
+import logging
+
+from artiq.protocols.sync_struct import Notifier
+
+
+class LogBuffer:
+    def __init__(self, depth):
+        self.depth = depth
+        self.data = Notifier([])
+
+    def log(self, level, source, time, message):
+        if len(self.data.read) >= self.depth:
+            del self.data[0]
+        self.data.append((level, source, time, message))
+
+
+class LogBufferHandler(logging.Handler):
+    def __init__(self, log_buffer, *args, **kwargs):
+        logging.Handler.__init__(self, *args, **kwargs)
+        self.log_buffer = log_buffer
+
+    def emit(self, record):
+        message = self.format(record)
+        self.log_buffer.log(record.levelno, record.source, record.created, message)
+
+
+name_to_level = {
+    "CRITICAL": logging.CRITICAL,
+    "ERROR": logging.ERROR,
+    "WARN": logging.WARNING,
+    "WARNING": logging.WARNING,
+    "INFO": logging.INFO,
+    "DEBUG": logging.DEBUG,
+}
+
+
+def parse_log_message(msg):
+    for name, level in name_to_level.items():
+        if msg.startswith(name + ":"):
+            remainder = msg[len(name) + 1:]
+            try:
+                idx = remainder.index(":")
+            except:
+                continue
+            return level, remainder[:idx], remainder[idx+1:]
+    return logging.INFO, "print", msg
+
+
+fwd_logger = logging.getLogger("fwd")
+
+
+class LogForwarder:
+    def log_worker(self, rid, message):
+        level, name, message = parse_log_message(message)
+        fwd_logger.name = name
+        fwd_logger.log(level, message,
+                       extra={"source": "worker({})".format(rid)})
+    log_worker.worker_pass_rid = True
+
+
+class SourceFilter:
+    def __init__(self, master_level):
+        self.master_level = master_level
+
+    def filter(self, record):
+        if not hasattr(record, "source"):
+            record.source = "master"
+        if record.source == "master":
+            return record.levelno >= self.master_level
+        else:
+            # log messages that are forwarded from a source have already
+            # been filtered, and may have a level below the master level.
+            return True
+
+
+def log_args(parser):
+    group = parser.add_argument_group("verbosity")
+    group.add_argument("-v", "--verbose", default=0, action="count",
+                       help="increase logging level for the master process")
+    group.add_argument("-q", "--quiet", default=0, action="count",
+                       help="decrease logging level for the master process")
+
+
+def init_log(args):
+    root_logger = logging.getLogger()
+    root_logger.setLevel(logging.NOTSET)  # we use our custom filter only
+    flt = SourceFilter(logging.WARNING + args.quiet*10 - args.verbose*10)
+
+    handlers = []
+    console_handler = logging.StreamHandler()
+    console_handler.setFormatter(logging.Formatter("%(levelname)s:%(source)s:%(name)s:%(message)s"))
+    handlers.append(console_handler)
+
+    log_buffer = LogBuffer(1000)
+    buffer_handler = LogBufferHandler(log_buffer)
+    buffer_handler.setFormatter(logging.Formatter("%(name)s:%(message)s"))
+    handlers.append(buffer_handler)
+
+    for handler in handlers:
+        handler.addFilter(flt)
+        root_logger.addHandler(handler)
+
+    log_forwarder = LogForwarder()
+
+    return log_buffer, log_forwarder
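Worker output lines are mapped back onto logging levels by parse_log_message: a "LEVEL:source:" prefix is split apart, and anything else is attributed to "print" at INFO level. A quick illustration against the code above, with invented messages (the import path assumes this new module lands as artiq.master.log):

    from artiq.master.log import parse_log_message

    # a prefixed line coming from a worker's logging output
    print(parse_log_message("WARNING:ttl_controller:channel 3 saturated"))
    # -> (30, 'ttl_controller', 'channel 3 saturated')

    # plain print output from the experiment
    print(parse_log_message("hello from the experiment"))
    # -> (20, 'print', 'hello from the experiment')

LogBuffer keeps at most depth records and drops the oldest first, so the "log" notifier published to clients behaves as a bounded ring buffer.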
@@ -228,9 +228,9 @@ class PrepareStage(TaskObject):
                 await run.build()
                 await run.prepare()
             except:
-                logger.warning("got worker exception in prepare stage, "
+                logger.error("got worker exception in prepare stage, "
                              "deleting RID %d",
                              run.rid, exc_info=True)
                 self.delete_cb(run.rid)
             else:
                 run.status = RunStatus.prepare_done

@@ -278,9 +278,9 @@ class RunStage(TaskObject):
                     run.status = RunStatus.running
                     completed = await run.run()
             except:
-                logger.warning("got worker exception in run stage, "
+                logger.error("got worker exception in run stage, "
                              "deleting RID %d",
                              run.rid, exc_info=True)
                 self.delete_cb(run.rid)
             else:
                 if completed:

@@ -316,9 +316,9 @@ class AnalyzeStage(TaskObject):
                 await run.analyze()
                 await run.write_results()
             except:
-                logger.warning("got worker exception in analyze stage, "
+                logger.error("got worker exception in analyze stage, "
                              "deleting RID %d",
                              run.rid, exc_info=True)
                 self.delete_cb(run.rid)
             else:
                 self.delete_cb(run.rid)
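All three scheduler stages now log worker failures at ERROR rather than WARNING level; exc_info=True makes the logging module attach the full traceback to the record, so the log dock shows what actually failed. The pattern reduced to its core (a sketch, not the scheduler's actual code; the broad except mirrors the diff):

    import logging

    logger = logging.getLogger(__name__)

    def guard_stage(stage, rid, delete_cb):
        # per-stage error handling: log with traceback, then drop the run
        try:
            stage()
        except:
            logger.error("got worker exception, deleting RID %d",
                         rid, exc_info=True)
            delete_cb(rid)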
@@ -5,14 +5,13 @@ Preparing:
 
 1. [Install miniconda][miniconda]
 2. `conda update -q conda`
-3. `conda install conda-build`
+3. `conda install conda-build jinja2 anaconda`
 4. `conda create -q -n py35 python=3.5`
 5. `conda config --add channels https://conda.anaconda.org/m-labs/channel/dev`
 
 Building:
 
-1. `source activate py35`
-2. `conda build pkgname --python 3.5`; this command displays a path to the freshly built package
-3. `anaconda upload <package> -c main -c dev`
+1. `conda build pkgname --python 3.5`; this command displays a path to the freshly built package
+2. `anaconda upload <package> -c main -c dev`
 
 [miniconda]: http://conda.pydata.org/docs/install/quick.html#linux-miniconda-install
@@ -0,0 +1,2 @@
+"%PYTHON%" setup.py install
+if errorlevel 1 exit 1

@@ -0,0 +1 @@
+$PYTHON setup.py install

@@ -0,0 +1,27 @@
+package:
+  name: lit
+  version: 0.4.1
+
+source:
+  fn: lit-0.4.1.tar.gz
+  url: https://pypi.python.org/packages/source/l/lit/lit-0.4.1.tar.gz
+  md5: ea6f00470e1bf7ed9e4edcff0f650fe6
+
+build:
+  number: 0
+
+requirements:
+  build:
+    - python
+    - setuptools
+
+  run:
+    - python
+
+test:
+  commands:
+    - lit --version
+
+about:
+  home: http://llvm.org/docs/CommandGuide/lit.html
+  license: MIT
@@ -7,7 +7,7 @@ source:
   git_tag: master
 
 build:
-  number: 4
+  number: 5
 
 requirements:
   build:

@@ -1,3 +1,3 @@
 #!/bin/bash
 
-PATH=/usr/local/llvm-or1k/bin:$PATH $PYTHON setup.py install
+$PYTHON setup.py install

@@ -15,12 +15,12 @@ requirements:
     - python
 
 build:
-  number: 4
+  number: 5
 
 test:
   imports:
     - llvmlite_artiq
-    - llvmlite_artiq.llvmpy
+    - llvmlite_artiq.binding
 
 about:
   home: https://pypi.python.org/pypi/llvmlite/
@@ -3,7 +3,7 @@ Core device
 
 The core device is a FPGA-based hardware component that contains a softcore CPU tightly coupled with the so-called RTIO core that provides precision timing. The CPU executes Python code that is statically compiled by the ARTIQ compiler, and communicates with the core device peripherals (TTL, DDS, etc.) over the RTIO core. This architecture provides high timing resolution, low latency, low jitter, high level programming capabilities, and good integration with the rest of the Python experiment code.
 
-While it is possible to use all the other parts of ARTIQ (controllers, master, GUI, result management, etc.) without a core device, many experiments require it.
+While it is possible to use all the other parts of ARTIQ (controllers, master, GUI, dataset management, etc.) without a core device, many experiments require it.
 
 
 .. _core-device-flash-storage:
@@ -1,9 +1,9 @@
 The environment
 ===============
 
-Experiments interact with an environment that consists of devices, parameters, arguments and results. Access to the environment is handled by the class :class:`artiq.language.environment.EnvExperiment` that experiments should derive from.
+Experiments interact with an environment that consists of devices, arguments and datasets. Access to the environment is handled by the class :class:`artiq.language.environment.EnvExperiment` that experiments should derive from.
 
-.. _ddb:
+.. _device-db:
 
 The device database
 -------------------

@@ -12,7 +12,7 @@ The device database contains information about the devices available in a ARTIQ
 
 The master (or ``artiq_run``) instantiates the device drivers (and the RPC clients in the case of controllers) for the experiments based on the contents of the device database.
 
-The device database is stored in the memory of the master and is backed by a PYON file typically called ``ddb.pyon``.
+The device database is stored in the memory of the master and is backed by a PYON file typically called ``device_db.pyon``.
 
 The device database is a Python dictionary whose keys are the device names, and values can have several types.
 

@@ -33,11 +33,6 @@ Aliases
 
 If an entry is a string, that string is used as a key for another lookup in the device database.
 
-The parameter database
-----------------------
-
-The parameter database is a key-value store that is global to all experiments. It is stored in the memory of the master and is backed by a PYON file typically called ``pdb.pyon``. It may be used to communicate values across experiments; for example, a periodic calibration experiment may update a parameter read by payload experiments.
-
 Arguments
 ---------
 

@@ -45,7 +40,14 @@ Arguments are values that parameterize the behavior of an experiment and are set
 
 Requesting the values of arguments can only be done in the build phase of an experiment. The value requests are also used to define the GUI widgets shown in the explorer when the experiment is selected.
 
-Results
--------
-
-Results are the output of an experiment. They are archived after in the HDF5 format after the experiment is run. Experiments may define real-time results that are (additionally) distributed to all clients connected to the master; for example, the ARTIQ GUI may plot them while the experiment is in progress to give rapid feedback to the user. Real-time results are a global key-value store (similar to the parameter database); experiments should use distinctive real-time result names in order to avoid conflicts.
+Datasets
+--------
+
+Datasets are values (possibly arrays) that are read and written by experiments and live in a key-value store.
+
+A dataset may be broadcasted, that is, distributed to all clients connected to the master. For example, the ARTIQ GUI may plot it while the experiment is in progress to give rapid feedback to the user. Broadcasted datasets live in a global key-value store; experiments should use distinctive real-time result names in order to avoid conflicts. Broadcasted datasets may be used to communicate values across experiments; for example, a periodic calibration experiment may update a dataset read by payload experiments. Broadcasted datasets are replaced when a new dataset with the same key (name) is produced.
+
+Broadcasted datasets may be persistent: the master stores them in a file typically called ``dataset_db.pyon`` so they are saved across master restarts.
+
+Datasets produced by an experiment run may be archived in the HDF5 output for that run.
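For instance, an experiment can stream points into a broadcasted dataset while a separate persistent dataset survives master restarts (a sketch consistent with the API documented in this merge; names are illustrative): ::

    class ParabolaDemo(EnvExperiment):
        def build(self):
            pass

        def run(self):
            # broadcast=True returns a handle whose mutations reach all clients
            points = self.set_dataset("parabola", [], broadcast=True)
            for i in range(10):
                points.append(i*i)
            # persist=True additionally stores the value in dataset_db.pyon
            self.set_dataset("calibration", 42.0, broadcast=True, persist=True)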
@@ -22,7 +22,7 @@ As a very first step, we will turn on a LED on the core device. Create a file ``
 
 The central part of our code is our ``LED`` class, that derives from :class:`artiq.language.environment.EnvExperiment`. Among other features, ``EnvExperiment`` calls our ``build`` method and provides the ``setattr_device`` method that interfaces to the device database to create the appropriate device drivers and make those drivers accessible as ``self.core`` and ``self.led``. The ``@kernel`` decorator tells the system that the ``run`` method must be executed on the core device (instead of the host). The decorator uses ``self.core`` internally, which is why we request the core device using ``setattr_device`` like any other.
 
-Copy the files ``ddb.pyon`` and ``pdb.pyon`` (containing the device and parameter databases) from the ``examples/master`` folder of ARTIQ into the same directory as ``led.py`` (alternatively, you can use the ``-d`` and ``-p`` options of ``artiq_run``). You can open the database files using a text editor - their contents are in a human-readable format. You will probably want to set the IP address of the core device in ``ddb.pyon`` so that the computer can connect to it (it is the ``host`` parameter of the ``comm`` entry). See :ref:`ddb` for more information. The example device database is designed for the NIST QC1 hardware on the KC705; see :ref:`board-ports` for RTIO channel assignments if you need to adapt the device database to a different hardware platform.
+Copy the files ``device_db.pyon`` and ``dataset_db.pyon`` (containing the device and dataset databases) from the ``examples/master`` folder of ARTIQ into the same directory as ``led.py`` (alternatively, you can use the ``--device-db`` and ``--dataset-db`` options of ``artiq_run``). You can open the database files using a text editor - their contents are in a human-readable format. You will probably want to set the IP address of the core device in ``device_db.pyon`` so that the computer can connect to it (it is the ``host`` parameter of the ``comm`` entry). See :ref:`device-db` for more information. The example device database is designed for the NIST QC1 hardware on the KC705; see :ref:`board-ports` for RTIO channel assignments if you need to adapt the device database to a different hardware platform.
 
 .. note::
     If the ``led`` device is a bidirectional TTL (i.e. ``TTLInOut`` instead of ``TTLOut``), you need to put it in output (driving) mode. Add the following at the beginning of ``run``: ::
@@ -10,7 +10,7 @@ Starting your first experiment with the master
 
 In the previous tutorial, we used the ``artiq_run`` utility to execute our experiments, which is a simple stand-alone tool that bypasses the ARTIQ management system. We will now see how to run an experiment using the master (the central program in the management system that schedules and executes experiments) and the GUI client (that connects to the master and controls it).
 
-First, create a folder ``~/artiq-master`` and copy the ``ddb.pyon`` and ``pdb.pyon`` files (device and parameter databases) found in the ``examples/master`` directory from the ARTIQ sources. The master uses those files in the same way as ``artiq_run``.
+First, create a folder ``~/artiq-master`` and copy the ``device_db.pyon`` and ``dataset_db.pyon`` (containing the device and dataset databases) found in the ``examples/master`` directory from the ARTIQ sources. The master uses those files in the same way as ``artiq_run``.
 
 Then create a ``~/artiq-master/repository`` sub-folder to contain experiments. The master scans this ``repository`` folder to determine what experiments are available (the name of the folder can be changed using ``-r``).
 

@@ -136,20 +136,20 @@ The master should now run the new version from its repository.
 
 As an exercise, add another argument to the experiment, commit and push the result, and verify that the new control is added in the GUI.
 
-Results
--------
+Datasets
+--------
 
 Modify the ``run()`` method of the experiment as follows: ::
 
     def run(self):
-        parabola = self.set_result("parabola", [], realtime=True)
+        parabola = self.set_dataset("parabola", [], broadcast=True)
         for i in range(int(self.count)):
             parabola.append(i*i)
             time.sleep(0.5)
 
 .. note:: You need to import the ``time`` module.
 
-Commit, push and submit the experiment as before. While it is running, go to the "Results" dock of the GUI and create a new XY plot showing the new result. Observe how the points are added one by one to the plot.
+Commit, push and submit the experiment as before. While it is running, go to the "Datasets" dock of the GUI and create a new XY plot showing the new result. Observe how the points are added one by one to the plot.
 
 After the experiment has finished executing, the results are written to a HDF5 file that resides in ``~/artiq-master/results/<date>/<time>``. Open that file with HDFView or h5dump, and observe the data we just generated as well as the Git commit ID of the experiment (a hexadecimal hash such as ``947acb1f90ae1b8862efb489a9cc29f7d4e0c645`` that represents the data at a particular time in the Git repository). The list of Git commit IDs can be found using the ``git log`` command in ``~/artiq-work``.
@@ -104,9 +104,7 @@ The artiq_coretool utility allows to perform maintenance on the core device:
 * as well as read, write and remove key-value records from the :ref:`core-device-flash-storage`;
 * erase the entire flash storage area.
 
-To use this tool, you need to specify a ``ddb.pyon`` DDB file which contains a ``comm`` device (an example is provided in ``artiq/examples/master/ddb.pyon``).
-This tells the tool how to connect to the core device (via serial or via TCP) and with which parameters (baudrate, serial device, IP address, TCP port).
-When not specified, the artiq_coretool utility will assume that there is a file named ``ddb.pyon`` in the current directory.
+To use this tool, you need to specify a ``device_db.pyon`` device database file which contains a ``comm`` device (an example is provided in ``artiq/examples/master/device_db.pyon``). This tells the tool how to connect to the core device (via serial or via TCP) and with which parameters (baudrate, serial device, IP address, TCP port). When not specified, the artiq_coretool utility will assume that there is a file named ``device_db.pyon`` in the current directory.
 
 To read the record whose key is ``mac``::
@@ -1,3 +1,5 @@
+import logging
+
 from artiq import *
 
 

@@ -51,6 +53,12 @@ class ArgumentsDemo(EnvExperiment):
         self.sc2 = SubComponent2(parent=self)
 
     def run(self):
+        logging.basicConfig(level=logging.DEBUG)
+        logging.error("logging test: error")
+        logging.warning("logging test: warning")
+        logging.info("logging test: info")
+        logging.debug("logging test: debug")
+
         print(self.free_value)
         print(self.boolean)
         print(self.enum)
@@ -101,7 +101,7 @@ class SpeedBenchmark(EnvExperiment):
                              self.scheduler.priority, None, False)
 
     def run_without_scheduler(self, pause):
-        payload = globals()["_Payload" + self.payload](*self.dbs())
+        payload = globals()["_Payload" + self.payload](*self.managers())
 
         start_time = time.monotonic()
         for i in range(int(self.nruns)):