#!/usr/bin/env python3.5

import argparse
import logging
import asyncio
import atexit
import fnmatch
from functools import partial

import numpy as np
import aiohttp

from artiq.tools import *
from artiq.protocols.sync_struct import Subscriber
from artiq.protocols.pc_rpc import Server
from artiq.protocols import pyon


logger = logging.getLogger(__name__)


def get_argparser():
    parser = argparse.ArgumentParser(
        description="ARTIQ data to InfluxDB bridge",
        epilog="Pattern matching works as follows. "
               "The default action on a key (dataset name) is to log it. "
               "Then the patterns are traversed in order and glob-matched "
               "against the key. "
               "Optional + and - pattern prefixes specify whether to log or "
               "ignore keys matching the rest of the pattern. "
               "A pattern without a prefix ignores matching keys. "
               "The last matched pattern takes precedence.")
    group = parser.add_argument_group("master")
    group.add_argument(
        "--server-master", default="::1",
        help="hostname or IP of the master to connect to")
    group.add_argument(
        "--port-master", default=3250, type=int,
        help="TCP port to use to connect to the master")
    group.add_argument(
        "--retry-master", default=5.0, type=float,
        help="retry timer for reconnecting to master")
    group = parser.add_argument_group("database")
    group.add_argument(
        "--baseurl-db", default="http://localhost:8086",
        help="base URL to access InfluxDB (default: %(default)s)")
    group.add_argument(
        "--user-db", default="", help="InfluxDB username")
    group.add_argument(
        "--password-db", default="", help="InfluxDB password")
    group.add_argument(
        "--database", default="db", help="database name to use")
    group.add_argument(
        "--table", default="lab", help="table name to use")
    group = parser.add_argument_group("filter")
    group.add_argument(
        "--pattern-file", default="influxdb_patterns.cfg",
        help="file to load the patterns from (default: %(default)s). "
             "If the file is not found, no patterns are loaded "
             "(everything is logged).")
    simple_network_args(parser, [("control", "control", 3248)])
    verbosity_args(parser)
    return parser
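

# A hypothetical invocation, assuming this script is installed as the
# artiq_influxdb console entry point (the option names are defined above,
# the values are only an example):
#   artiq_influxdb --server-master ::1 --baseurl-db http://localhost:8086 \
#       --database db --table lab --pattern-file influxdb_patterns.cfg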


def influxdb_str(s):
    return '"' + s.replace('"', '\\"') + '"'


def format_influxdb(v):
    if isinstance(v, bool):
        if v:
            return "bool", "t"
        else:
            return "bool", "f"
    # np.integer/np.floating match both Python and numpy scalar types.
    elif np.issubdtype(type(v), np.integer):
        return "int", "{}i".format(v)
    elif np.issubdtype(type(v), np.floating):
        return "float", "{}".format(v)
    elif isinstance(v, str):
        return "str", influxdb_str(v)
    else:
        return "pyon", influxdb_str(pyon.encode(v))


class DBWriter(TaskObject):
    def __init__(self, base_url, user, password, database, table):
        self.base_url = base_url
        self.user = user
        self.password = password
        self.database = database
        self.table = table

        self._queue = asyncio.Queue(100)

    def update(self, k, v):
        try:
            self._queue.put_nowait((k, v))
        except asyncio.QueueFull:
            logger.warning("failed to update dataset '%s': "
                           "too many pending updates", k)

    async def _do(self):
        while True:
            k, v = await self._queue.get()
            url = self.base_url + "/write"
            params = {"u": self.user, "p": self.password, "db": self.database,
                      "precision": "ms"}
            fmt_ty, fmt_v = format_influxdb(v)
            data = "{},dataset={} {}={}".format(self.table, k, fmt_ty, fmt_v)
            try:
                response = await aiohttp.request(
                    "POST", url, params=params, data=data)
            except Exception:
                logger.warning("got exception trying to update '%s'",
                               k, exc_info=True)
            else:
                if response.status not in (200, 204):
                    content = (await response.content.read()).decode().strip()
                    logger.warning("got HTTP status %d "
                                   "trying to update '%s': %s",
                                   response.status, k, content)
                response.close()
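

# The request body above is InfluxDB line protocol. For table "lab" and a
# dataset "temperature" holding 23.5, the POST to <base_url>/write would
# carry (illustrative):
#   lab,dataset=temperature float=23.5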


class _Mock:
    """Accept and discard all item operations (get returns self, set and
    delete do nothing)."""

    def __setitem__(self, k, v):
        pass

    def __getitem__(self, k):
        return self

    def __delitem__(self, k):
        pass
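

# Since __getitem__ returns self, arbitrarily nested accesses such as
# mock["x"][0][1] = 42 all end in a no-op; this is how in-place dataset
# mutations are ignored (see Datasets below).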


class Datasets:
    def __init__(self, filter_function, writer, init):
        # The initial dataset state (init) is not logged; only subsequent
        # updates are.
        self.filter_function = filter_function
        self.writer = writer

    def __setitem__(self, k, v):
        if self.filter_function(k):
            # A dataset entry is a (persist, value) pair; log the value only.
            self.writer.update(k, v[1])

    # ignore mutations
    def __getitem__(self, k):
        return _Mock()

    # ignore deletions
    def __delitem__(self, k):
        pass


class MasterReader(TaskObject):
    def __init__(self, server, port, retry, filter_function, writer):
        self.server = server
        self.port = port
        self.retry = retry

        self.filter_function = filter_function
        self.writer = writer

    async def _do(self):
        subscriber = Subscriber(
            "datasets",
            partial(Datasets, self.filter_function, self.writer))
        while True:
            try:
                await subscriber.connect(self.server, self.port)
                try:
                    await asyncio.wait_for(subscriber.receive_task, None)
                finally:
                    await subscriber.close()
            except (ConnectionAbortedError, ConnectionError,
                    ConnectionRefusedError, ConnectionResetError) as e:
                logger.warning("Connection to master failed (%s: %s)",
                               e.__class__.__name__, str(e))
            else:
                logger.warning("Connection to master lost")
            logger.warning("Retrying in %.1f seconds", self.retry)
            await asyncio.sleep(self.retry)


class Filter:
    def __init__(self, pattern_file):
        self.pattern_file = pattern_file
        self.scan_patterns()

    def scan_patterns(self):
        """(Re)load the patterns file."""
        try:
            with open(self.pattern_file, "r") as f:
                self.patterns = []
                for line in f:
                    line = line.rstrip()
                    if line:
                        self.patterns.append(line)
        except FileNotFoundError:
            logger.info("no pattern file found, logging everything")
            self.patterns = []
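
    # An illustrative pattern file, one glob pattern per line:
    #   +foo.*
    #   -foo.raw*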

    # Privatize so that it is not shown in artiq_rpctool list-methods.
    def _filter(self, k):
        take = "+"
        for pattern in self.patterns:
            sign = "-"
            if pattern[0] in "+-":
                sign, pattern = pattern[0], pattern[1:]
            if fnmatch.fnmatchcase(k, pattern):
                take = sign
        return take == "+"
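
    # Illustrative: with the patterns ["+foo.*", "-foo.raw*"], the key
    # "foo.mean" is logged (last match: "+foo.*") and "foo.raw_data" is
    # ignored (last match: "-foo.raw*"); with no patterns, every key is
    # logged.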

    def get_patterns(self):
        """Show existing patterns."""
        return self.patterns


def main():
    args = get_argparser().parse_args()
    init_logger(args)

    loop = asyncio.get_event_loop()
    atexit.register(loop.close)

    writer = DBWriter(args.baseurl_db,
                      args.user_db, args.password_db,
                      args.database, args.table)
    writer.start()
    atexit_register_coroutine(writer.stop)

    filter = Filter(args.pattern_file)
    rpc_server = Server({"influxdb_filter": filter}, builtin_terminate=True)
    loop.run_until_complete(rpc_server.start(bind_address_from_args(args),
                                             args.port_control))
    atexit_register_coroutine(rpc_server.stop)

    reader = MasterReader(args.server_master, args.port_master,
                          args.retry_master, filter._filter, writer)
    reader.start()
    atexit_register_coroutine(reader.stop)

    loop.run_until_complete(rpc_server.wait_terminate())
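

# The Filter instance is reachable over the control RPC under the target
# name "influxdb_filter", so a client such as artiq_rpctool can invoke
# scan_patterns() to reload the pattern file or get_patterns() to inspect
# the active patterns at runtime.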


if __name__ == "__main__":
    main()
|