forked from M-Labs/artiq

master: reorganize result output code

parent 1aec6a0462
commit 965dd89b61
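Judging from the import paths, the hunks below appear to touch, in order: the master frontend (the RTResults import is repointed), a new artiq/master/results.py collecting get_hdf5_output(), result_dict_to_hdf5() and RTResults, the deleted artiq/master/rt_results.py, the result database module (its private _result_dict_to_hdf5 helper replaced by the shared one; the worker now imports it as artiq.master.worker_db rather than artiq.master.db), and the worker, whose inline HDF5 file creation collapses into get_hdf5_output(). File names were not preserved in this capture, so these attributions are inferred.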
@@ -9,7 +9,7 @@ from artiq.protocols.pc_rpc import Server
 from artiq.protocols.sync_struct import Publisher
 from artiq.protocols.file_db import FlatFileDB, SimpleHistory
 from artiq.master.scheduler import Scheduler
-from artiq.master.rt_results import RTResults
+from artiq.master.results import RTResults
 from artiq.master.repository import Repository
 from artiq.tools import verbosity_args, init_logger
@@ -0,0 +1,72 @@
+import os
+import time
+
+import numpy
+import h5py
+
+from artiq.protocols.sync_struct import Notifier, process_mod
+
+
+def get_hdf5_output(start_time, rid, name):
+    dirname = os.path.join("results",
+                           time.strftime("%y-%m-%d", start_time),
+                           time.strftime("%H-%M", start_time))
+    filename = "{:09}-{}.h5".format(rid, name)
+    os.makedirs(dirname, exist_ok=True)
+    return h5py.File(os.path.join(dirname, filename), "w")
+
+
+_type_to_hdf5 = {
+    int: h5py.h5t.STD_I64BE,
+    float: h5py.h5t.IEEE_F64BE
+}
+
+def result_dict_to_hdf5(f, rd):
+    for name, data in rd.items():
+        if isinstance(data, list):
+            el_ty = type(data[0])
+            for d in data:
+                if type(d) != el_ty:
+                    raise TypeError("All list elements must have the same"
+                                    " type for HDF5 output")
+            try:
+                el_ty_h5 = _type_to_hdf5[el_ty]
+            except KeyError:
+                raise TypeError("List element type {} is not supported for"
+                                " HDF5 output".format(el_ty))
+            dataset = f.create_dataset(name, (len(data), ), el_ty_h5)
+            dataset[:] = data
+        elif isinstance(data, numpy.ndarray):
+            f.create_dataset(name, data=data)
+        else:
+            ty = type(data)
+            try:
+                ty_h5 = _type_to_hdf5[ty]
+            except KeyError:
+                raise TypeError("Type {} is not supported for HDF5 output"
+                                .format(ty))
+            dataset = f.create_dataset(name, (), ty_h5)
+            dataset[()] = data
+
+
+class RTResults:
+    def __init__(self):
+        self.groups = Notifier(dict())
+        self.current_group = "default"
+
+    def init(self, description):
+        data = dict()
+        for rtr in description.keys():
+            if isinstance(rtr, tuple):
+                for e in rtr:
+                    data[e] = []
+            else:
+                data[rtr] = []
+        self.groups[self.current_group] = {
+            "description": description,
+            "data": data
+        }
+
+    def update(self, mod):
+        target = self.groups[self.current_group]["data"]
+        process_mod(target, mod)
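For orientation, a minimal sketch of how the two new helpers compose; the rid, experiment name and result values here are invented for illustration:

    import time

    from artiq.master.results import get_hdf5_output, result_dict_to_hdf5

    # get_hdf5_output() hands start_time to time.strftime(), so it must
    # be a struct_time, e.g. from time.localtime().
    start_time = time.localtime()
    f = get_hdf5_output(start_time, 42, "photon_count")
    # -> results/<yy-mm-dd>/<HH-MM>/000000042-photon_count.h5
    try:
        # Homogeneous lists and scalars map through _type_to_hdf5 (int
        # and float only); numpy arrays are stored with their own dtype.
        result_dict_to_hdf5(f, {"counts": [10, 12, 9], "temperature": 4.2})
    finally:
        f.close()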
@@ -1,24 +0,0 @@
-from artiq.protocols.sync_struct import Notifier, process_mod
-
-
-class RTResults:
-    def __init__(self):
-        self.groups = Notifier(dict())
-        self.current_group = "default"
-
-    def init(self, description):
-        data = dict()
-        for rtr in description.keys():
-            if isinstance(rtr, tuple):
-                for e in rtr:
-                    data[e] = []
-            else:
-                data[rtr] = []
-        self.groups[self.current_group] = {
-            "description": description,
-            "data": data
-        }
-
-    def update(self, mod):
-        target = self.groups[self.current_group]["data"]
-        process_mod(target, mod)
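The RTResults class is deleted here verbatim; it lives on unchanged in the new module added above. As a sketch of its flow (the description dict is invented; only its keys matter to init()):

    from artiq.master.results import RTResults

    rtr = RTResults()
    # Tuple keys declare several named series for one real-time result;
    # the values ("xy", "raw") are invented placeholders.
    rtr.init({("cooling_x", "cooling_y"): "xy", "ion_count": "raw"})
    # groups["default"] now holds the description plus empty data lists:
    #   {"description": ...,
    #    "data": {"cooling_x": [], "cooling_y": [], "ion_count": []}}
    # update() then applies sync_struct mods from the running experiment
    # to groups[current_group]["data"] via process_mod().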
@@ -1,44 +1,9 @@
 from collections import OrderedDict
 import importlib
 
-import numpy
-import h5py
-
 from artiq.protocols.sync_struct import Notifier
 from artiq.protocols.pc_rpc import Client, BestEffortClient
-
-
-_type_to_hdf5 = {
-    int: h5py.h5t.STD_I64BE,
-    float: h5py.h5t.IEEE_F64BE
-}
-
-def _result_dict_to_hdf5(f, rd):
-    for name, data in rd.items():
-        if isinstance(data, list):
-            el_ty = type(data[0])
-            for d in data:
-                if type(d) != el_ty:
-                    raise TypeError("All list elements must have the same"
-                                    " type for HDF5 output")
-            try:
-                el_ty_h5 = _type_to_hdf5[el_ty]
-            except KeyError:
-                raise TypeError("List element type {} is not supported for"
-                                " HDF5 output".format(el_ty))
-            dataset = f.create_dataset(name, (len(data), ), el_ty_h5)
-            dataset[:] = data
-        elif isinstance(data, numpy.ndarray):
-            f.create_dataset(name, data=data)
-        else:
-            ty = type(data)
-            try:
-                ty_h5 = _type_to_hdf5[ty]
-            except KeyError:
-                raise TypeError("Type {} is not supported for HDF5 output"
-                                .format(ty))
-            dataset = f.create_dataset(name, (), ty_h5)
-            dataset[()] = data
+from artiq.master.results import result_dict_to_hdf5
 
 
 class ResultDB:
@@ -68,8 +33,8 @@ class ResultDB:
         self.data[name] = value
 
     def write_hdf5(self, f):
-        _result_dict_to_hdf5(f, self.realtime_data.read)
-        _result_dict_to_hdf5(f, self.data.read)
+        result_dict_to_hdf5(f, self.realtime_data.read)
+        result_dict_to_hdf5(f, self.data.read)
 
 
 def _create_device(desc, dbh):
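The only change here is the rename: the helper loses its leading underscore, presumably because it is now a shared module-level API rather than a private function of this file. A minimal sketch of what write_hdf5() delegates to, assuming (as the original code suggests) that realtime_data is a Notifier whose .read attribute exposes the underlying dict:

    import time

    from artiq.protocols.sync_struct import Notifier
    from artiq.master.results import get_hdf5_output, result_dict_to_hdf5

    realtime_data = Notifier({"counts": [10, 12, 9]})  # invented contents
    f = get_hdf5_output(time.localtime(), 1, "demo")   # invented rid/name
    try:
        result_dict_to_hdf5(f, realtime_data.read)
    finally:
        f.close()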
@ -1,14 +1,12 @@
|
|||
import sys
|
||||
import os
|
||||
import time
|
||||
from inspect import isclass
|
||||
import traceback
|
||||
|
||||
import h5py
|
||||
|
||||
from artiq.protocols import pyon
|
||||
from artiq.tools import file_import
|
||||
from artiq.master.db import DBHub, ResultDB
|
||||
from artiq.master.worker_db import DBHub, ResultDB
|
||||
from artiq.master.results import get_hdf5_output
|
||||
|
||||
|
||||
def get_object():
|
||||
|
@@ -118,12 +116,7 @@ def run(rid, run_params):
     finally:
         dbh.close()
 
-    dirname = os.path.join("results",
-                           time.strftime("%y-%m-%d", start_time),
-                           time.strftime("%H-%M", start_time))
-    filename = "{:09}-{}.h5".format(rid, unit.__name__)
-    os.makedirs(dirname, exist_ok=True)
-    f = h5py.File(os.path.join(dirname, filename), "w")
+    f = get_hdf5_output(start_time, rid, unit.__name__)
     try:
        rdb.write_hdf5(f)
     finally:
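Net effect in the worker: six inlined lines collapse into a single get_hdf5_output() call, so the results/<date>/<time> directory layout and the zero-padded <rid>-<name>.h5 filename format are now defined in one place (the new results module) rather than in the worker itself.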