artiq/artiq/master/log.py

import logging
import logging.handlers

from artiq.protocols.sync_struct import Notifier
from artiq.protocols.logging import parse_log_message, log_with_name


class LogBuffer:
    """Keeps the most recent ``depth`` log entries in a sync_struct Notifier,
    so that the buffer contents can be published to connected clients."""
    def __init__(self, depth):
        self.depth = depth
        self.data = Notifier([])

    def log(self, level, source, time, message):
        # drop the oldest entry once the buffer is full
        if len(self.data.read) >= self.depth:
            del self.data[0]
        self.data.append((level, source, time, message))


class LogBufferHandler(logging.Handler):
    def __init__(self, log_buffer, *args, **kwargs):
        logging.Handler.__init__(self, *args, **kwargs)
        self.log_buffer = log_buffer

    def emit(self, record):
        message = self.format(record)
        self.log_buffer.log(record.levelno, record.source, record.created,
                            message)


def log_worker(rid, message):
    level, name, message = parse_log_message(message)
    log_with_name(name, level, message,
                  extra={"source": "worker({})".format(rid)})
# Handlers with this attribute set are expected to be called with the RID of
# the originating worker as their first argument.
log_worker.worker_pass_rid = True


class SourceFilter:
    def __init__(self, master_level):
        self.master_level = master_level

    def filter(self, record):
        if not hasattr(record, "source"):
            record.source = "master"
        if record.source == "master":
            return record.levelno >= self.master_level
        else:
            # log messages that are forwarded from a source have already
            # been filtered, and may have a level below the master level.
            return True
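
# A minimal illustration of the SourceFilter policy above (the record names
# are made up for the example): with the default threshold of logging.WARNING,
# an INFO record originating in the master is dropped, while an INFO record
# carrying a forwarded "source" passes through unchanged.
#
#     flt = SourceFilter(logging.WARNING)
#     local = logging.LogRecord("artiq.master", logging.INFO, __file__, 0,
#                               "local info", None, None)
#     flt.filter(local)        # False: below the master threshold
#     forwarded = logging.LogRecord("experiment", logging.INFO, __file__, 0,
#                                   "forwarded info", None, None)
#     forwarded.source = "worker(0)"
#     flt.filter(forwarded)    # True: forwarded records are not re-filtered
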
def log_args(parser):
    group = parser.add_argument_group("logging")
    group.add_argument("-v", "--verbose", default=0, action="count",
                       help="increase logging level of the master process")
    group.add_argument("-q", "--quiet", default=0, action="count",
                       help="decrease logging level of the master process")
    group.add_argument("--log-file", default="",
                       help="store logs in rotated files; set the "
                            "base filename")
    group.add_argument("--log-max-size", type=int, default=1024,
                       help="maximum size of each log file in KiB "
                            "(default: %(default)d)")
    group.add_argument("--log-backup-count", type=int, default=6,
                       help="number of old log files to keep (.<n> is added "
                            "to the base filename) "
                            "(default: %(default)d)")


def init_log(args):
    root_logger = logging.getLogger()
    root_logger.setLevel(logging.NOTSET)  # we use our custom filter only
    # default threshold is WARNING; each -q raises it by one level (10),
    # each -v lowers it by one level
    flt = SourceFilter(logging.WARNING + args.quiet*10 - args.verbose*10)

    full_fmt = logging.Formatter(
        "%(levelname)s:%(source)s:%(name)s:%(message)s")

    handlers = []
    console_handler = logging.StreamHandler()
    console_handler.setFormatter(full_fmt)
    handlers.append(console_handler)

    if args.log_file:
        file_handler = logging.handlers.RotatingFileHandler(
            args.log_file,
            maxBytes=args.log_max_size*1024,
            backupCount=args.log_backup_count)
        file_handler.setFormatter(full_fmt)
        handlers.append(file_handler)

    log_buffer = LogBuffer(1000)
    buffer_handler = LogBufferHandler(log_buffer)
    buffer_handler.setFormatter(logging.Formatter("%(name)s:%(message)s"))
    handlers.append(buffer_handler)

    for handler in handlers:
        handler.addFilter(flt)
        root_logger.addHandler(handler)

    return log_buffer
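
# Typical wiring (a minimal sketch, not part of this module; the parser name
# and surrounding entry point are illustrative): a command-line front end
# would register the options with log_args(), call init_log() once after
# parsing, and keep the returned LogBuffer so it can be published to clients.
#
#     import argparse
#
#     parser = argparse.ArgumentParser(description="ARTIQ master")
#     log_args(parser)
#     args = parser.parse_args()
#     log_buffer = init_log(args)
#     # the root logger now fans out to the console handler, the optional
#     # rotating file handler, and the LogBuffer handler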