Coverage for /opt/conda/envs/apienv/lib/python3.10/site-packages/daiquiri/core/logging.py: 52% of 176 statements
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import importlib
import logging
import logging.handlers
import traceback
import sys
import json
import threading
from functools import partial

import graypy
import graypy.handler
from flask import g

import daiquiri
from daiquiri.core.exceptions import PrettyException

logger = logging.getLogger(__name__)

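# Raise the level of these chatty third-party loggers so only errors propagate.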
for k in [
    "engineio",
    "engineio.server",
    "socketio.server",
    "werkzeug",
    "geventwebsocket.handler",
]:
    log = logging.getLogger(k)
    log.setLevel(logging.ERROR)

class LogWrapper:
    """LogWrapper wraps .info etc. with an extra dict"""

    _valid_types = set(["app", "hardware", "scan", "queue", "session", "actor"])

    _methods = ["info", "debug", "warning", "error", "exception"]

    def __init__(self, logger):
        self.logger = logger
        for k in self._methods:
            setattr(self, k, partial(self.meth, k))

    def inst(self):
        return self.logger

    def meth(self, meth, *args, type=None, state=None, **kwargs):
        extra = {}
        if state:
            extra["state"] = state
        if type:
            if type in self._valid_types:
                extra["type"] = type
            else:
                logger.warning("type '%s' unsupported", type)

        return getattr(self.logger, meth)(*args, extra=extra, **kwargs)

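# Illustrative usage sketch (not part of the original module; the logger name
# and message are assumptions). meth() injects ``type`` and ``state`` into the
# ``extra`` dict passed to the underlying logger:
#
#     wrapped = LogWrapper(logging.getLogger("user"))
#     wrapped.info("Scan started", type="scan", state="RUNNING")
#     # equivalent to:
#     # logging.getLogger("user").info(
#     #     "Scan started", extra={"type": "scan", "state": "RUNNING"}
#     # )
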
class GrayPyFilter(logging.Filter):
    """Inject some useful metadata into graylog records"""

    def __init__(self, config):
        self._config = config

    def filter(self, record):
        record.daiquiri_version = daiquiri.__version__
        # TODO: Why is this MainProcess by default?
        record.process_name = "daiquiri"
        record.beamlinename = self._config.get("meta_beamline")

        for library in self._config["versions"]:
            try:
                mod = importlib.import_module(library)
                setattr(record, f"{library}_version", mod.__version__)
            except (AttributeError, ModuleNotFoundError):
                pass

        return True

def handle_unhandled_exception(exc_type, exc_value, exc_traceback):
    """Handler for unhandled exceptions that will write to the logs

    See https://www.scrygroup.com/tutorial/2018-02-06/python-excepthook-logging/
    """
    if isinstance(exc_value, PrettyException):
        logger.exception(exc_value, exc_info=(exc_type, exc_value, exc_traceback))
        messages = logging.getLogger("daiquiri.messages")
        messages.error("\n" + exc_value.pretty())
        return
    if issubclass(exc_type, KeyboardInterrupt):
        # call the default excepthook saved at __excepthook__
        sys.__excepthook__(exc_type, exc_value, exc_traceback)
        return
    logger.critical(
        "Unhandled exception", exc_info=(exc_type, exc_value, exc_traceback)
    )
    user = logging.getLogger("user")
    user.exception(
        f"Unhandled exception: {str(exc_value)}",
        extra={"type": "app"},
        exc_info=(exc_type, exc_value, exc_traceback),
    )

def patch_threading_excepthook():
    """Installs the logging exception handler into the `threading` module's
    `Thread` object.

    Inspired by https://bugs.python.org/issue1230540
    """
    old_init = threading.Thread.__init__

    def new_init(self, *args, **kwargs):
        old_init(self, *args, **kwargs)
        old_run = self.run

        def run_with_our_excepthook(*args, **kwargs):
            try:
                old_run(*args, **kwargs)
            except (KeyboardInterrupt, SystemExit):
                raise
            except Exception as e:
                sys.excepthook(type(e), e, e.__traceback__)

        self.run = run_with_our_excepthook

    threading.Thread.__init__ = new_init

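# Note: Logging.start() below installs handle_unhandled_exception as
# sys.excepthook and then calls patch_threading_excepthook(), so exceptions
# escaping threads created afterwards are routed through the same handler.
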
class Logging:
    _levels = {
        "debug": logging.DEBUG,
        "info": logging.INFO,
        "warning": logging.WARNING,
        "error": logging.ERROR,
    }

    _logs = ["user", "ui"]

    def __init__(self):
        self.log_formatter = logging.Formatter(
            "{asctime}\t{name}\t{levelname}\t{message}", style="{"
        )

    def start(self, config):
        sys.excepthook = handle_unhandled_exception
        patch_threading_excepthook()

        self._level = self._levels[config.get("log_level", "debug")]
        log_root = config.get("log_root")
        if log_root == "NONE":
            file_handler = None
            self._log_root = None
        else:
            _log_root = config.get("log_root", os.path.join("~", "logs", "daquiri"))
            self._log_root = os.path.abspath(os.path.expanduser(_log_root))
            os.makedirs(self._log_root, exist_ok=True)
            file_handler = logging.handlers.TimedRotatingFileHandler(
                f"{self._log_root}/daiquiri.log", when="d", interval=1, backupCount=5
            )
            file_handler.setFormatter(self.log_formatter)

        stdout_handler = logging.StreamHandler(sys.stdout)
        stdout_handler.setFormatter(self.log_formatter)

        logging.captureWarnings(True)
        logging.getLogger().addHandler(stdout_handler)
        if file_handler:
            logging.getLogger().addHandler(file_handler)
        logging.getLogger().setLevel(self._level)

        if config.get("graylog_host") and config.get("graylog_port"):
            self.init_graylog(config)

    def init_sio(self, socketio):
        socket_handler = SocketIOLogHandler(socketio)
        socket_handler.setLevel(self._level)

        if self._log_root is not None:
            for ty in self._logs:
                file_handler = logging.handlers.TimedRotatingFileHandler(
                    f"{self._log_root}/{ty}_json.log",
                    when="d",
                    interval=1,
                    backupCount=5,
                )
                file_handler.setFormatter(JSONFormatter())
                log = self.get(ty).inst()
                log.addHandler(socket_handler)
                log.addHandler(file_handler)

    def get(self, name):
        if name in self._logs:
            return LogWrapper(logging.getLogger(name))
        else:
            logger.error(f"No such logger {name}")

    def list_files(self):
        if self._log_root is None:
            return {}
        return {ty: f"{self._log_root}/{ty}_json.log" for ty in self._logs}

    def init_graylog(self, config):
        # https://github.com/DiamondLightSource/python-zocalo/blob/main/zocalo/__init__.py
        class PythonLevelToSyslogConverter:
            @staticmethod
            def get(level, _):
                if level < 20:
                    return 7  # DEBUG
                elif level < 25:
                    return 6  # INFO
                elif level < 30:
                    return 5  # NOTICE
                elif level < 40:
                    return 4  # WARNING
                elif level < 50:
                    return 3  # ERROR
                elif level < 60:
                    return 2  # CRITICAL
                else:
                    return 1  # ALERT

        graypy.handler.SYSLOG_LEVELS = PythonLevelToSyslogConverter()

        handler = graypy.GELFUDPHandler
        graylog = handler(
            config["graylog_host"], config["graylog_port"], level_names=True
        )
        graylog.addFilter(GrayPyFilter(config))
        logging.getLogger().addHandler(graylog)

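# Illustrative sketch of a configuration dict (values are assumptions; the keys
# are the ones read by start(), init_graylog() and GrayPyFilter):
#
#     config = {
#         "log_level": "info",
#         "log_root": "~/logs/daiquiri",   # or "NONE" to disable file logging
#         "meta_beamline": "bl01",
#         "versions": ["daiquiri", "flask"],
#         "graylog_host": "graylog.example.com",
#         "graylog_port": 12201,
#     }
#     log.start(config)            # `log` is the module-level Logging instance below
#     user_log = log.get("user")   # LogWrapper around the "user" logger
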
def format_record(record):
    if record.exc_info:
        if isinstance(record.exc_info[1], PrettyException):
            stack_trace = record.exc_info[1].pretty()
        else:
            stack_trace = "".join(traceback.format_exception(*record.exc_info))
    else:
        stack_trace = ""
    try:
        record.asctime
    except AttributeError:
        record.asctime = logging._defaultFormatter.formatTime(record)

    try:
        record.type
    except AttributeError:
        record.type = None

    try:
        record.state
    except AttributeError:
        record.state = None

    session = None
    try:
        session = g.sessionid
    except (AttributeError, RuntimeError):
        pass

    return {
        "message": record.getMessage(),
        "level": record.levelname,
        "timestamp": record.asctime,
        "epoch": record.created,
        "logger": record.name,
        "stack_trace": stack_trace,
        "type": record.type,
        "sessionid": session,
        "state": record.state,
    }

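# Illustrative example of the dict returned above (field values are made up;
# the keys match the return statement exactly):
#
#     {
#         "message": "Scan started",
#         "level": "INFO",
#         "timestamp": "2024-11-14 02:13:00,000",
#         "epoch": 1731550380.0,
#         "logger": "user",
#         "stack_trace": "",
#         "type": "scan",
#         "sessionid": None,
#         "state": "RUNNING",
#     }
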
class JSONFormatter(logging.Formatter):
    def format(self, record):
        return json.dumps(format_record(record))

class SocketIOLogHandler(logging.Handler):
    """SocketIO Log Handler
    Emits each record as a `log_record` event on the `/app` namespace
    """

    def __init__(self, socketio):
        super().__init__()
        self._socketio = socketio

    def emit(self, record):
        self._socketio.emit("log_record", format_record(record), namespace="/app")

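# Illustrative sketch (assumes a flask-socketio style ``socketio`` object):
# Logging.init_sio() attaches a SocketIOLogHandler to the "user" and "ui"
# loggers, and emit() above broadcasts each record as a "log_record" event on
# the "/app" namespace, so any client subscribed to that namespace receives it.
#
#     log.init_sio(socketio)
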
log = Logging()