2
0
mirror of https://github.com/m-labs/artiq.git synced 2024-12-25 11:18:27 +08:00

master,client,gui: logging sources, timestamps, levels

This commit is contained in:
Sebastien Bourdeauducq 2015-10-14 01:06:57 +08:00
parent f632e84d74
commit c00bce3967
4 changed files with 161 additions and 31 deletions

View File

@ -224,18 +224,22 @@ def _show_dict(args, notifier_name, display_fun):
_run_subscriber(args.server, args.port, subscriber)
def _print_log_record(record):
    """Print one (level, source, timestamp, message) record on stdout.

    The timestamp is rendered in local time as month/day hour:minute:second.
    """
    level, source, timestamp, message = record
    formatted_time = time.strftime("%m/%d %H:%M:%S", time.localtime(timestamp))
    print(level, source, formatted_time, message)
class _LogPrinter:
    """sync_struct consumer that echoes incoming log records to stdout.

    Implements only the list mutations the log notifier actually performs
    (initial snapshot, append, insert, pop).
    """

    def __init__(self, init):
        # Print the records already present when the subscription starts.
        for record in init:
            _print_log_record(record)

    def append(self, record):
        _print_log_record(record)

    def insert(self, i, record):
        _print_log_record(record)

    def pop(self, i=-1):
        # Records evicted from the bounded buffer are not erased from the
        # terminal; nothing to do.
        pass

View File

@ -6,16 +6,17 @@ import atexit
import os
from artiq.protocols.pc_rpc import Server
from artiq.protocols.sync_struct import Notifier, Publisher
from artiq.protocols.sync_struct import Publisher
from artiq.master.log import log_args, init_log
from artiq.master.databases import DeviceDB, DatasetDB
from artiq.master.scheduler import Scheduler
from artiq.master.worker_db import get_last_rid
from artiq.master.repository import FilesystemBackend, GitBackend, Repository
from artiq.tools import verbosity_args, init_logger
def get_argparser():
parser = argparse.ArgumentParser(description="ARTIQ master")
group = parser.add_argument_group("network")
group.add_argument(
"--bind", default="::1",
@ -26,11 +27,13 @@ def get_argparser():
group.add_argument(
"--port-control", default=3251, type=int,
help="TCP port to listen to for control (default: %(default)d)")
group = parser.add_argument_group("databases")
group.add_argument("--device-db", default="device_db.pyon",
help="device database file (default: '%(default)s')")
group.add_argument("--dataset-db", default="dataset_db.pyon",
help="dataset file (default: '%(default)s')")
group = parser.add_argument_group("repository")
group.add_argument(
"-g", "--git", default=False, action="store_true",
@ -38,25 +41,15 @@ def get_argparser():
group.add_argument(
"-r", "--repository", default="repository",
help="path to the repository (default: '%(default)s')")
verbosity_args(parser)
log_args(parser)
return parser
class Log:
    # NOTE(review): pre-change side of the diff — this (rid, message) buffer
    # is removed by this commit and superseded by artiq.master.log.LogBuffer.
    def __init__(self, depth):
        # depth: maximum number of retained entries before eviction
        self.depth = depth
        # data: list of (rid, message) tuples published via sync_struct
        self.data = Notifier([])

    def log(self, rid, message):
        # Evict the oldest entry once the buffer is full, then append.
        if len(self.data.read) >= self.depth:
            del self.data[0]
        self.data.append((rid, message))
    # Flag read by the worker plumbing: pass the RID with each message.
    log.worker_pass_rid = True
def main():
args = get_argparser().parse_args()
init_logger(args)
log_buffer, log_forwarder = init_log(args)
if os.name == "nt":
loop = asyncio.ProactorEventLoop()
asyncio.set_event_loop(loop)
@ -68,13 +61,13 @@ def main():
dataset_db = DatasetDB(args.dataset_db)
dataset_db.start()
atexit.register(lambda: loop.run_until_complete(dataset_db.stop()))
log = Log(1000)
if args.git:
repo_backend = GitBackend(args.repository)
else:
repo_backend = FilesystemBackend(args.repository)
repository = Repository(repo_backend, device_db.get_device_db, log.log)
repository = Repository(repo_backend, device_db.get_device_db,
log_forwarder.log_worker)
atexit.register(repository.close)
repository.scan_async()
@ -83,7 +76,7 @@ def main():
"get_device": device_db.get,
"get_dataset": dataset_db.get,
"update_dataset": dataset_db.update,
"log": log.log
"log": log_forwarder.log_worker
}
scheduler = Scheduler(get_last_rid() + 1, worker_handlers, repo_backend)
worker_handlers["scheduler_submit"] = scheduler.submit
@ -105,7 +98,7 @@ def main():
"devices": device_db.data,
"datasets": dataset_db.data,
"explist": repository.explist,
"log": log.data
"log": log_buffer.data
})
loop.run_until_complete(server_notify.start(
args.bind, args.port_notify))

View File

@ -1,4 +1,6 @@
import asyncio
import logging
import time
from quamash import QtGui, QtCore
from pyqtgraph import dockarea
@ -7,23 +9,59 @@ from artiq.protocols.sync_struct import Subscriber
from artiq.gui.tools import ListSyncModel
def _level_to_name(level):
    """Return the canonical name for a numeric logging level.

    Values are bucketed by threshold, so intermediate numeric levels map to
    the highest standard level they reach; anything below INFO is DEBUG.
    """
    thresholds = (
        (logging.CRITICAL, "CRITICAL"),
        (logging.ERROR, "ERROR"),
        (logging.WARNING, "WARNING"),
        (logging.INFO, "INFO"),
    )
    for numeric, name in thresholds:
        if level >= numeric:
            return name
    return "DEBUG"
class _LogModel(ListSyncModel):
    """Qt item model over the synchronized log record list.

    Each backing-store entry is a (level, source, time, message) tuple;
    rows are colored by severity and the message column uses a fixed-width
    font.
    """

    def __init__(self, parent, init):
        ListSyncModel.__init__(self,
            ["Level", "Source", "Time", "Message"],
            parent, init)
        self.fixed_font = QtGui.QFont()
        self.fixed_font.setFamily("Monospace")
        # Foreground/background brushes keyed on record severity.
        self.debug_fg = QtGui.QBrush(QtGui.QColor(55, 55, 55))
        self.warning_bg = QtGui.QBrush(QtGui.QColor(255, 255, 180))
        self.error_bg = QtGui.QBrush(QtGui.QColor(255, 150, 150))

    def data(self, index, role):
        # Message column (3) is rendered in the fixed-width font.
        if (role == QtCore.Qt.FontRole and index.isValid()
                and index.column() == 3):
            return self.fixed_font
        elif role == QtCore.Qt.BackgroundRole and index.isValid():
            level = self.backing_store[index.row()][0]
            if level >= logging.ERROR:
                return self.error_bg
            elif level >= logging.WARNING:
                return self.warning_bg
            else:
                return ListSyncModel.data(self, index, role)
        elif role == QtCore.Qt.ForegroundRole and index.isValid():
            level = self.backing_store[index.row()][0]
            if level <= logging.DEBUG:
                return self.debug_fg
            else:
                return ListSyncModel.data(self, index, role)
        else:
            return ListSyncModel.data(self, index, role)

    def convert(self, v, column):
        # Map the raw (level, source, time, message) tuple to display text.
        if column == 0:
            return _level_to_name(v[0])
        elif column == 1:
            return v[1]
        elif column == 2:
            return time.strftime("%m/%d %H:%M:%S", time.localtime(v[2]))
        else:
            return v[3]
class LogDock(dockarea.Dock):

95
artiq/master/log.py Normal file
View File

@ -0,0 +1,95 @@
import logging
import time
from artiq.protocols.sync_struct import Notifier
class LogBuffer:
    """Bounded FIFO of log records, published through a sync_struct Notifier.

    Entries are (level, source, timestamp, message) tuples; once ``depth``
    entries are held, logging a new record evicts the oldest one.
    """

    def __init__(self, depth):
        # Maximum number of records retained.
        self.depth = depth
        self.data = Notifier([])

    def log(self, level, source, message):
        """Append a record, stamping it with the current time."""
        record = (level, source, time.time(), message)
        full = len(self.data.read) >= self.depth
        if full:
            del self.data[0]
        self.data.append(record)
class LogBufferHandler(logging.Handler):
    """logging.Handler that copies formatted records into a LogBuffer.

    Records carrying a ``source`` attribute (forwarded messages) keep that
    source; records emitted by the master itself are tagged "master".
    """

    def __init__(self, log_buffer, *args, **kwargs):
        logging.Handler.__init__(self, *args, **kwargs)
        self.log_buffer = log_buffer

    def emit(self, record):
        source = getattr(record, "source", "master")
        self.log_buffer.log(record.levelno, source, self.format(record))
# Severity prefixes recognized in worker log messages; "WARN" is accepted
# as an alias for "WARNING".
name_to_level = {
    "CRITICAL": logging.CRITICAL,
    "ERROR": logging.ERROR,
    "WARN": logging.WARNING,
    "WARNING": logging.WARNING,
    "INFO": logging.INFO,
    "DEBUG": logging.DEBUG,
}


def parse_log_message(msg):
    """Split a "LEVEL:text" message into (numeric level, text).

    Messages without a recognized level prefix are returned unchanged at
    INFO level.
    """
    prefix, colon, rest = msg.partition(":")
    if colon and prefix in name_to_level:
        return name_to_level[prefix], rest
    return logging.INFO, msg
class LogForwarder:
    """Injects worker log messages into the master's logging system.

    Each message is parsed for a severity prefix and re-emitted through the
    root logger, tagged with the originating run ID as its source.
    """

    def log_worker(self, rid, message):
        level, text = parse_log_message(message)
        logging.log(level, text,
                    extra={"source": "worker:{}".format(rid)})
    # Flag read by the worker plumbing: pass the RID with each message.
    log_worker.worker_pass_rid = True
class SourceFilter:
    """Level filter that exempts records forwarded from external sources.

    Forwarded records (those carrying a ``source`` attribute) were already
    filtered at their origin and may legitimately sit below the master's
    own threshold, so they always pass; everything else must reach
    ``master_level``.
    """

    def __init__(self, master_level):
        self.master_level = master_level

    def filter(self, record):
        return (hasattr(record, "source")
                or record.levelno >= self.master_level)
def log_args(parser):
    """Add the -v/-q counted options that set the master's log level."""
    group = parser.add_argument_group("verbosity")
    for short_opt, long_opt, verb in (("-v", "--verbose", "increase"),
                                      ("-q", "--quiet", "decrease")):
        group.add_argument(
            short_opt, long_opt, default=0, action="count",
            help=verb + " logging level for the master process")
def init_log(args):
    """Set up master logging from parsed command-line arguments.

    Installs a console handler and a LogBuffer handler on the root logger,
    both gated by a SourceFilter whose threshold is derived from the
    -v/-q counts.  Returns (log_buffer, log_forwarder).
    """
    root = logging.getLogger()
    root.setLevel(logging.NOTSET)  # we use our custom filter only

    master_level = logging.WARNING + args.quiet*10 - args.verbose*10
    source_filter = SourceFilter(master_level)

    console_handler = logging.StreamHandler()
    console_handler.setFormatter(
        logging.Formatter("%(levelname)s:%(name)s:%(message)s"))

    log_buffer = LogBuffer(1000)
    buffer_handler = LogBufferHandler(log_buffer)
    buffer_handler.setFormatter(logging.Formatter("%(name)s:%(message)s"))

    for handler in (console_handler, buffer_handler):
        handler.addFilter(source_filter)
        root.addHandler(handler)

    return log_buffer, LogForwarder()