#!/usr/bin/env python3

import argparse
import logging
import asyncio
import atexit
import fnmatch
import time
from functools import partial

import numpy as np
import aiohttp

from artiq.tools import *
from artiq.protocols.sync_struct import Subscriber
from artiq.protocols.pc_rpc import Server
from artiq.protocols import pyon


logger = logging.getLogger(__name__)


def get_argparser():
    parser = argparse.ArgumentParser(
        description="ARTIQ data to InfluxDB bridge",
        epilog="Pattern matching works as follows. "
               "The default action on a key (dataset name) is to log it. "
               "Then the patterns are traversed in order and glob-matched "
               "with the key. "
               "Optional + and - pattern prefixes specify whether to ignore "
               "or log keys matching the rest of the pattern. "
               "Default (in the absence of a prefix) is to ignore. The last "
               "matched pattern takes precedence.")

    group = parser.add_argument_group("master")
    group.add_argument(
        "--server-master", default="::1",
        help="hostname or IP of the master to connect to")
    group.add_argument(
        "--port-master", default=3250, type=int,
        help="TCP port to use to connect to the master")
    group.add_argument(
        "--retry-master", default=5.0, type=float,
        help="retry timer for reconnecting to the master")

    group = parser.add_argument_group("database")
    group.add_argument(
        "--baseurl-db", default="http://localhost:8086",
        help="base URL to access InfluxDB (default: %(default)s)")
    group.add_argument(
        "--user-db", default="", help="InfluxDB username")
    group.add_argument(
        "--password-db", default="", help="InfluxDB password")
    group.add_argument(
        "--database", default="db", help="database name to use")
    group.add_argument(
        "--table", default="lab", help="table name to use")

    group = parser.add_argument_group("filter")
    group.add_argument(
        "--pattern-file", default="influxdb_patterns.cfg",
        help="file to load the patterns from (default: %(default)s). "
             "If the file is not found, no patterns are loaded "
             "(everything is logged).")

    simple_network_args(parser, [("control", "control", 3248)])
    verbosity_args(parser)
    return parser
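
# For reference, a hypothetical influxdb_patterns.cfg following the rules
# described in the epilog above (one glob pattern per line, later matches
# win):
#
#   -*
#   +photon_count*
#   -photon_count_raw
#
# This ignores everything, re-enables keys starting with "photon_count",
# then ignores "photon_count_raw" again.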


def format_influxdb(v):
    if np.issubdtype(type(v), np.bool_):
        return "bool={}".format(v)
    if np.issubdtype(type(v), np.integer):
        return "int={}i".format(v)
    if np.issubdtype(type(v), np.floating):
        return "float={}".format(v)
    if np.issubdtype(type(v), np.str_):
        return "str=\"{}\"".format(v.replace('"', '\\"'))
    return "pyon=\"{}\"".format(pyon.encode(v).replace('"', '\\"'))
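
# For reference, sample outputs of format_influxdb() (the pyon case is
# approximate, depending on pyon's encoding of the value):
#
#   format_influxdb(True)   -> 'bool=True'
#   format_influxdb(5)      -> 'int=5i'
#   format_influxdb(2.5)    -> 'float=2.5'
#   format_influxdb("on")   -> 'str="on"'
#   format_influxdb([1, 2]) -> 'pyon="[1, 2]"'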


class DBWriter(TaskObject):
    def __init__(self, base_url, user, password, database, table):
        self.base_url = base_url
        self.user = user
        self.password = password
        self.database = database
        self.table = table
        # Bounded queue: when InfluxDB cannot keep up, updates are dropped
        # with a warning instead of letting memory grow without bound.
        self._queue = asyncio.Queue(100)

    def update(self, k, v):
        try:
            self._queue.put_nowait((k, v, time.time()))
        except asyncio.QueueFull:
            logger.warning("failed to update dataset '%s': "
                           "too many pending updates", k)

    async def _do(self):
        async with aiohttp.ClientSession() as session:
            while True:
                k, v, t = await self._queue.get()
                url = self.base_url + "/write"
                params = {"u": self.user, "p": self.password,
                          "db": self.database, "precision": "ms"}
                # InfluxDB line protocol,
                # e.g. "lab,dataset=photon_count float=2.5 1450000000000"
                data = "{},dataset={} {} {}".format(
                    self.table, k, format_influxdb(v), round(t*1e3))
                try:
                    response = await session.post(url, params=params,
                                                  data=data)
                except Exception:
                    logger.warning("got exception trying to update '%s'",
                                   k, exc_info=True)
                else:
                    if response.status not in (200, 204):
                        content = (await response.content.read()).decode().strip()
                        logger.warning("got HTTP status %d "
                                       "trying to update '%s': %s",
                                       response.status, k, content)
                    response.close()
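
# Minimal usage sketch (assuming, as in artiq.tools, that TaskObject.start()
# schedules _do() as a task on the running event loop):
#
#   writer = DBWriter("http://localhost:8086", "", "", "db", "lab")
#   writer.start()
#   writer.update("photon_count", 2.5)  # queued, POSTed in the background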


class _Mock:
    """Swallows nested dataset mutations (item access, assignment, deletion)
    so that only top-level dataset writes reach InfluxDB."""
    def __setitem__(self, k, v):
        pass

    def __getitem__(self, k):
        return self

    def __delitem__(self, k):
        pass


class Datasets:
    """sync_struct target that forwards filtered dataset updates to the
    writer. ``init`` (the initial dataset state sent by the master) is
    ignored: only subsequent updates are logged."""
    def __init__(self, filter_function, writer, init):
        self.filter_function = filter_function
        self.writer = writer

    def __setitem__(self, k, v):
        if self.filter_function(k):
            # dataset values are (persist, value) tuples; log the value
            self.writer.update(k, v[1])

    # ignore mutations
    def __getitem__(self, k):
        return _Mock()

    # ignore deletions
    def __delitem__(self, k):
        pass


class MasterReader(TaskObject):
    def __init__(self, server, port, retry, filter_function, writer):
        self.server = server
        self.port = port
        self.retry = retry
        self.filter_function = filter_function
        self.writer = writer

    async def _do(self):
        subscriber = Subscriber(
            "datasets",
            partial(Datasets, self.filter_function, self.writer))
        while True:
            try:
                await subscriber.connect(self.server, self.port)
                try:
                    await asyncio.wait_for(subscriber.receive_task, None)
                finally:
                    await subscriber.close()
            except (ConnectionAbortedError, ConnectionError,
                    ConnectionRefusedError, ConnectionResetError) as e:
                logger.warning("Connection to master failed (%s: %s)",
                               e.__class__.__name__, str(e))
            else:
                logger.warning("Connection to master lost")
            logger.warning("Retrying in %.1f seconds", self.retry)
            await asyncio.sleep(self.retry)


class Filter:
    def __init__(self, pattern_file):
        self.pattern_file = pattern_file
        self.scan_patterns()

    def scan_patterns(self):
        """(Re)load the patterns file."""
        try:
            with open(self.pattern_file, "r") as f:
                self.patterns = []
                for line in f:
                    line = line.rstrip()
                    if line:
                        self.patterns.append(line)
        except FileNotFoundError:
            logger.info("no pattern file found, logging everything")
            self.patterns = []

    # Privatize so that it is not shown in artiq_rpctool list-methods.
    def _filter(self, k):
        take = "+"
        for pattern in self.patterns:
            sign = "-"
            if pattern[0] in "+-":
                sign, pattern = pattern[0], pattern[1:]
            if fnmatch.fnmatchcase(k, pattern):
                take = sign
        return take == "+"

    def get_patterns(self):
        """Show existing patterns."""
        return self.patterns
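
# Matching sketch with hypothetical patterns ["-*", "+photon*"]:
#
#   _filter("photon_count") -> True  (last match is "+photon*", i.e. log)
#   _filter("temperature")  -> False (only "-*" matches, i.e. ignore)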


def main():
    args = get_argparser().parse_args()
    init_logger(args)

    loop = asyncio.get_event_loop()
    atexit.register(loop.close)

    writer = DBWriter(args.baseurl_db,
                      args.user_db, args.password_db,
                      args.database, args.table)
    writer.start()
    atexit_register_coroutine(writer.stop)

    filter = Filter(args.pattern_file)
    # Expose the filter over RPC so patterns can be inspected and reloaded
    # at runtime.
    rpc_server = Server({"influxdb_filter": filter}, builtin_terminate=True)
    loop.run_until_complete(rpc_server.start(bind_address_from_args(args),
                                             args.port_control))
    atexit_register_coroutine(rpc_server.stop)

    reader = MasterReader(args.server_master, args.port_master,
                          args.retry_master, filter._filter, writer)
    reader.start()
    atexit_register_coroutine(reader.stop)

    loop.run_until_complete(rpc_server.wait_terminate())


if __name__ == "__main__":
    main()
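
# Typical invocation (hypothetical values; a master on the local machine and
# a default InfluxDB instance):
#
#   ./artiq_influxdb.py --server-master ::1 --port-master 3250 \
#       --baseurl-db http://localhost:8086 --database db --table lab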