"""Client-side interfaces to the master databases (devices, datasets).

These artefacts are intended for out-of-process use (i.e. from workers or the
standalone command line tools).
"""
|
|
|
|
|
2016-06-12 12:56:12 +08:00
|
|
|
from operator import setitem
|
2015-01-12 18:51:23 +08:00
|
|
|
import importlib
|
2015-04-05 17:49:41 +08:00
|
|
|
import logging
|
2015-07-14 04:08:20 +08:00
|
|
|
|
2019-11-10 15:55:17 +08:00
|
|
|
from sipyco.sync_struct import Notifier
|
|
|
|
from sipyco.pc_rpc import AutoTarget, Client, BestEffortClient
|
2015-02-04 18:37:57 +08:00
|
|
|
|
|
|
|
|
2015-04-05 18:02:07 +08:00
|
|
|
logger = logging.getLogger(__name__)
|
2015-04-05 17:49:41 +08:00
|
|
|
|
|
|
|
|
2016-07-08 07:23:28 +08:00
|
|
|
class DummyDevice:
    """Placeholder driver for device DB entries of type ``"dummy"``."""
|
|
|
|
|
|
|
|
|
2015-10-12 17:18:23 +08:00
|
|
|
def _create_device(desc, device_mgr):
|
|
|
|
ty = desc["type"]
|
|
|
|
if ty == "local":
|
|
|
|
module = importlib.import_module(desc["module"])
|
|
|
|
device_class = getattr(module, desc["class"])
|
2016-03-05 00:17:08 +08:00
|
|
|
return device_class(device_mgr, **desc.get("arguments", {}))
|
2015-10-12 17:18:23 +08:00
|
|
|
elif ty == "controller":
|
2015-10-18 14:37:08 +08:00
|
|
|
if desc.get("best_effort", False):
|
|
|
|
cls = BestEffortClient
|
2015-10-12 17:18:23 +08:00
|
|
|
else:
|
2015-10-18 14:35:49 +08:00
|
|
|
cls = Client
|
|
|
|
# Automatic target can be specified either by the absence of
|
|
|
|
# the target_name parameter, or a None value.
|
|
|
|
target_name = desc.get("target_name", None)
|
|
|
|
if target_name is None:
|
|
|
|
target_name = AutoTarget
|
|
|
|
return cls(desc["host"], desc["port"], target_name)
|
2016-03-24 00:46:39 +08:00
|
|
|
elif ty == "controller_aux_target":
|
|
|
|
controller = device_mgr.get_desc(desc["controller"])
|
|
|
|
if desc.get("best_effort", controller.get("best_effort", False)):
|
|
|
|
cls = BestEffortClient
|
|
|
|
else:
|
|
|
|
cls = Client
|
|
|
|
return cls(controller["host"], controller["port"], desc["target_name"])
|
2016-07-08 07:23:28 +08:00
|
|
|
elif ty == "dummy":
|
|
|
|
return DummyDevice()
|
2015-10-12 17:18:23 +08:00
|
|
|
else:
|
|
|
|
raise ValueError("Unsupported type in device DB: " + ty)
|
|
|
|
|
|
|
|
|
2016-04-16 19:55:08 +08:00
|
|
|
class DeviceError(Exception):
    """Raised when a device description cannot be retrieved or the device
    cannot be created."""
|
|
|
|
|
|
|
|
|
2015-10-12 17:18:23 +08:00
|
|
|
class DeviceManager:
    """Handles creation and destruction of local device drivers and controller
    RPC clients."""
    def __init__(self, ddb, virtual_devices=None):
        # ``None`` sentinel instead of a mutable default argument: a shared
        # ``dict()`` default would alias the same dict across all managers
        # constructed without the argument.
        self.ddb = ddb
        self.virtual_devices = {} if virtual_devices is None else virtual_devices
        # List of (description, device) pairs, in creation order.
        self.active_devices = []

    def get_device_db(self):
        """Returns the full contents of the device database."""
        return self.ddb.get_device_db()

    def get_desc(self, name):
        """Return the device DB description for *name*, resolving aliases."""
        return self.ddb.get(name, resolve_alias=True)

    def get(self, name):
        """Get the device driver or controller client corresponding to a
        device database entry.

        Virtual devices are returned directly; otherwise an existing active
        device with an identical description is reused, or a new one is
        created.

        Raises :exc:`DeviceError` (chained to the original exception) if the
        description cannot be fetched or the device cannot be created.
        """
        if name in self.virtual_devices:
            return self.virtual_devices[name]

        try:
            desc = self.get_desc(name)
        except Exception as e:
            raise DeviceError("Failed to get description of device '{}'"
                              .format(name)) from e

        # Reuse a device if one with the same description is already active.
        for existing_desc, existing_dev in self.active_devices:
            if desc == existing_desc:
                return existing_dev

        try:
            dev = _create_device(desc, self)
        except Exception as e:
            raise DeviceError("Failed to create device '{}'"
                              .format(name)) from e
        self.active_devices.append((desc, dev))
        return dev

    def close_devices(self):
        """Closes all active devices, in the opposite order as they were
        requested."""
        for _desc, dev in reversed(self.active_devices):
            try:
                if isinstance(dev, (Client, BestEffortClient)):
                    dev.close_rpc()
                elif hasattr(dev, "close"):
                    dev.close()
            except Exception as e:
                # Best effort: a failing close must not prevent the remaining
                # devices from being closed.
                logger.warning("Exception %r when closing device %r", e, dev)
        self.active_devices.clear()
|
|
|
|
|
|
|
|
|
|
|
|
class DatasetManager:
    """Tracks the datasets of a running experiment.

    Datasets live in up to three places: the broadcast notifier (pushed to
    the dataset DB), the ``local`` dict (written to the results HDF5 file),
    and the ``archive`` dict (datasets read back with ``archive=True``).
    """
    def __init__(self, ddb):
        # Broadcast state; every change is published to the dataset DB.
        self._broadcaster = Notifier(dict())
        # Datasets kept locally, dumped to HDF5 at the end of the run.
        self.local = dict()
        # Input datasets recorded for provenance, also dumped to HDF5.
        self.archive = dict()

        self.ddb = ddb
        self._broadcaster.publish = ddb.update

    def set(self, key, value, broadcast=False, persist=False, archive=True):
        """Store a dataset, optionally broadcasting and/or archiving it."""
        # Persistence only makes sense for broadcast datasets.
        broadcast = broadcast or persist

        if broadcast:
            self._broadcaster[key] = persist, value
        elif key in self._broadcaster.raw_view:
            # No longer broadcast: retract the previously published value.
            del self._broadcaster[key]

        if archive:
            self.local[key] = value
        elif key in self.local:
            del self.local[key]

    def _get_mutation_target(self, key):
        """Return the object to mutate for *key*, preferring the broadcast
        copy so that mutations are published."""
        local_value = self.local.get(key, None)
        if key in self._broadcaster.raw_view:
            if local_value is not None:
                # Broadcast and local must refer to the very same object.
                assert local_value is self._broadcaster.raw_view[key][1]
            return self._broadcaster[key][1]
        if local_value is None:
            raise KeyError("Cannot mutate nonexistent dataset '{}'".format(key))
        return local_value

    def mutate(self, key, index, value):
        """Set ``dataset[index] = value``, converting tuple indices into
        slices (a tuple of tuples becomes a multi-dimensional slice)."""
        target = self._get_mutation_target(key)
        if isinstance(index, tuple):
            if isinstance(index[0], tuple):
                index = tuple(slice(*bounds) for bounds in index)
            else:
                index = slice(*index)
        setitem(target, index, value)

    def append_to(self, key, value):
        """Append *value* to the dataset stored under *key*."""
        self._get_mutation_target(key).append(value)

    def get(self, key, archive=False):
        """Return a dataset, looking locally first, then in the dataset DB.

        With ``archive=True``, a DB-fetched value is also recorded in the
        archive (warning if it overwrites an earlier archived value).
        """
        if key in self.local:
            return self.local[key]

        data = self.ddb.get(key)
        if archive:
            if key in self.archive:
                logger.warning("Dataset '%s' is already in archive, "
                               "overwriting", key, stack_info=True)
            self.archive[key] = data
        return data

    def write_hdf5(self, f):
        """Dump local datasets and the archive into HDF5 groups of *f*."""
        for group_name, datasets in (("datasets", self.local),
                                     ("archive", self.archive)):
            group = f.create_group(group_name)
            for key, value in datasets.items():
                _write(group, key, value)
|
|
|
|
|
|
|
|
|
|
|
|
def _write(group, k, v):
|
|
|
|
# Add context to exception message when the user writes a dataset that is
|
|
|
|
# not representable in HDF5.
|
|
|
|
try:
|
2022-01-25 10:02:15 +08:00
|
|
|
group[k] = v
|
2018-06-25 17:50:30 +08:00
|
|
|
except TypeError as e:
|
|
|
|
raise TypeError("Error writing dataset '{}' of type '{}': {}".format(
|
2022-01-25 10:02:15 +08:00
|
|
|
k, type(v), e))
|