From 794e7146d5afcff57f0bbfe7a9754d6e8c853e16 Mon Sep 17 00:00:00 2001 From: Emir Muhammad Date: Mon, 16 Feb 2026 19:04:49 +0100 Subject: [PATCH 01/12] Add ability to configure ers session --- src/daqpytools/apps/logging_demonstrator.py | 6 +++--- src/daqpytools/logging/logger.py | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/daqpytools/apps/logging_demonstrator.py b/src/daqpytools/apps/logging_demonstrator.py index 400f6f1..4f8f30e 100644 --- a/src/daqpytools/apps/logging_demonstrator.py +++ b/src/daqpytools/apps/logging_demonstrator.py @@ -296,7 +296,7 @@ def parse_args(self, ctx: click.Context, args: list[str]) -> None: @click.option( "-e", "--ersprotobufstream", - is_flag=True, + type=str, help=( "Set up an ERS handler, and publish to ERS" ) @@ -364,7 +364,7 @@ def main( stream_handlers: bool, child_logger: bool, disable_logger_inheritance: bool, - ersprotobufstream: bool, + ersprotobufstream: str, handlertypes:bool, handlerconf:bool, throttle: bool, @@ -384,7 +384,7 @@ def main( disable_logger_inheritance (bool): If true, disable logger inheritance so each logger instance only uses the logger handlers assigned to the given logger instance. - ersprotobufstream (bool): If true, sets up an ERS protobuf handler. Error msg + ersprotobufstream (str): Sets up an ERS protobuf handler with supplied session name. Error msg are demonstrated in the HandlerType demonstration, requiring handlerconf to be set to true. The topic for these tests is session_tester. handlertypes (bool): If true, demonstrates the advanced feature of HandlerTypes. 
diff --git a/src/daqpytools/logging/logger.py b/src/daqpytools/logging/logger.py index 2f99d17..bc66419 100644 --- a/src/daqpytools/logging/logger.py +++ b/src/daqpytools/logging/logger.py @@ -71,7 +71,7 @@ def get_daq_logger( rich_handler: bool = False, file_handler_path: str | None = None, stream_handlers: bool = False, - ers_kafka_handler: bool = False, + ers_kafka_handler: str | None = None, throttle: bool = False ) -> logging.Logger: """C'tor for the default logging instances. @@ -84,7 +84,7 @@ def get_daq_logger( file_handler_path (str | None): Path to the file handler log file. If None, no file handler is added. stream_handlers (bool): Whether to add both stdout and stderr stream handlers. - ers_kafka_handler (bool): Whether to add an ERS protobuf handler. + ers_kafka_handler (str): Whether to add an ERS protobuf handler. str is session name throttle (bool): Whether to add the throttle filter or not. Note, does not mean outputs are filtered by default! See ThrottleFilter for details. @@ -150,7 +150,7 @@ def get_daq_logger( add_stdout_handler(logger, use_parent_handlers) add_stderr_handler(logger, use_parent_handlers) if ers_kafka_handler: - add_ers_kafka_handler(logger, use_parent_handlers, "session_tester") + add_ers_kafka_handler(logger, use_parent_handlers, ers_kafka_handler) if throttle: # Note: Default parameters used. 
No functionality on customisability yet From 5f5cc0792b571d260375d5e2d5881a618a285ad3 Mon Sep 17 00:00:00 2001 From: PawelPlesniak Date: Tue, 17 Feb 2026 12:12:07 +0100 Subject: [PATCH 02/12] Removing defaults that broke the nightly --- src/daqpytools/logging/logger.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/daqpytools/logging/logger.py b/src/daqpytools/logging/logger.py index bc66419..147f7de 100644 --- a/src/daqpytools/logging/logger.py +++ b/src/daqpytools/logging/logger.py @@ -149,7 +149,7 @@ def get_daq_logger( if stream_handlers: add_stdout_handler(logger, use_parent_handlers) add_stderr_handler(logger, use_parent_handlers) - if ers_kafka_handler: + if ers_kafka_handler: add_ers_kafka_handler(logger, use_parent_handlers, ers_kafka_handler) if throttle: From 0a5cf81a3c9876da88157fc5dd24aaca56bb8ce2 Mon Sep 17 00:00:00 2001 From: Emir Muhammad Date: Tue, 17 Feb 2026 12:52:29 +0100 Subject: [PATCH 03/12] Add hacky first pass at setup_ers --- src/daqpytools/apps/logging_demonstrator.py | 68 +++++++---- src/daqpytools/logging/handlers.py | 118 ++++++++++++++++++++ src/daqpytools/logging/logger.py | 31 ++++- 3 files changed, 191 insertions(+), 26 deletions(-) diff --git a/src/daqpytools/apps/logging_demonstrator.py b/src/daqpytools/apps/logging_demonstrator.py index 4f8f30e..7f9e4e1 100644 --- a/src/daqpytools/apps/logging_demonstrator.py +++ b/src/daqpytools/apps/logging_demonstrator.py @@ -401,36 +401,58 @@ def main( LoggerSetupError: If no handlers are set up for the logger. 
""" logger_name = "daqpytools_logging_demonstrator" + + os.environ["DUNEDAQ_ERS_WARNING"] = "erstrace,throttle,lstdout" + os.environ["DUNEDAQ_ERS_INFO"] = "lstderr,throttle,lstdout" + os.environ["DUNEDAQ_ERS_FATAL"] = "rich,lstdout" + os.environ["DUNEDAQ_ERS_ERROR"] = ( + "erstrace," + "throttle," + "lstdout," + "protobufstream(monkafka.cern.ch:30092)" + ) + main_logger: logging.Logger = get_daq_logger( logger_name=logger_name, log_level=log_level, - use_parent_handlers=not disable_logger_inheritance, - rich_handler=rich_handler, - file_handler_path=file_handler_path, - stream_handlers=stream_handlers, - ers_kafka_handler=ersprotobufstream, - throttle=throttle + rich_handler=False, + setup_ers_handlers=True, + ers_kafka_handler="session_temp" ) - if not suppress_basic: - test_main_functions(main_logger) + main_logger.warning("test") + + + # main_logger: logging.Logger = get_daq_logger( + # logger_name=logger_name, + # log_level=log_level, + # use_parent_handlers=not disable_logger_inheritance, + # rich_handler=rich_handler, + # file_handler_path=file_handler_path, + # stream_handlers=stream_handlers, + # ers_kafka_handler=ersprotobufstream, + # throttle=throttle + # ) + + # if not suppress_basic: + # test_main_functions(main_logger) - if child_logger: - test_child_logger( - logger_name, - log_level, - disable_logger_inheritance, - rich_handler, - file_handler_path, - stream_handlers - ) + # if child_logger: + # test_child_logger( + # logger_name, + # log_level, + # disable_logger_inheritance, + # rich_handler, + # file_handler_path, + # stream_handlers + # ) - if throttle: - test_throttle(main_logger) - if handlertypes: - test_handlertypes(main_logger) - if handlerconf: - test_handlerconf(main_logger) + # if throttle: + # test_throttle(main_logger) + # if handlertypes: + # test_handlertypes(main_logger) + # if handlerconf: + # test_handlerconf(main_logger) if __name__ == "__main__": diff --git a/src/daqpytools/logging/handlers.py 
b/src/daqpytools/logging/handlers.py index 11c126a..c8c40e6 100644 --- a/src/daqpytools/logging/handlers.py +++ b/src/daqpytools/logging/handlers.py @@ -275,6 +275,7 @@ def _convert_str_to_handlertype(handler_str: str) -> tuple[HandlerType, converts "protobufstream(url:port)" to return both the HandlerType and the protobuf configuration """ + # print(f"{handler_str=}") if "erstrace" in handler_str: msg = ( "ERSTrace is a C++ implementation, " @@ -297,10 +298,12 @@ def _make_ers_handler_conf(ers_log_level :str) -> ERSPyLogHandlerConf: """Generates the ERSPyLogHandlerConf from reading an environment variable.""" erspyloghandlerconf = ERSPyLogHandlerConf() envvalue = os.getenv(ers_log_level) + # print(f"{envvalue=}") if envvalue is None: raise ERSEnvError(ers_log_level) for h in envvalue.split(","): + # print(f"{h=}") handlertype, kafkaconf = LogHandlerConf._convert_str_to_handlertype(h) erspyloghandlerconf.handlers.append(handlertype) if kafkaconf: @@ -543,6 +546,19 @@ def _format_timestamp(timestamp: float) -> str: padding: int = LOG_RECORD_PADDING.get("time", 25) time_str: str = dt.strftime(DATE_TIME_BASE_FORMAT).ljust(padding)[:padding] return Text(time_str, style="logging.time") + + +def add_throttle_filter(log: logging.Logger) -> None: + """Add the Throttle filter to the logger. + + Args: + log (logging.Logger): Logger to add the rich handler to. + + Returns: + None + """ + log.addFilter(ThrottleFilter()) + return def check_parent_handlers( log: logging.Logger, @@ -717,3 +733,105 @@ def add_file_handler(log: logging.Logger, use_parent_handlers: bool, path: str) log.addHandler(file_handler) return + +def _logger_has_handler( + log: logging.Logger, + handler_type: type[logging.Handler], + target_stream: io.IOBase | None = None, +) -> bool: + """Check if logger already has a matching handler. + + For StreamHandler, ``target_stream`` can be used to distinguish stdout/stderr. 
+ """ + type_matches = [isinstance(handler, handler_type) for handler in log.handlers] + stream_matches = [ + handler.stream is target_stream if target_stream else False + for handler in log.handlers + if isinstance(handler, logging.StreamHandler) + ] + return any(type_matches + stream_matches) + + +def _logger_has_filter(log: logging.Logger, filter_type: type[logging.Filter]) -> bool: + """Check if logger already has a matching filter type.""" + return any(isinstance(logger_filter, filter_type) for logger_filter in log.filters) + + +def add_handlers_from_types( + log: logging.Logger, + handler_types: set[HandlerType], + ers_session_name: str | None, +) -> None: + """Add handlers to a logger based on HandlerType values. + + This helper intentionally supports only the default options for now: + - ``use_parent_handlers`` is always True. + - ``HandlerType.File`` is not supported and raises immediately. + - ``HandlerType.Protobufstream`` requires ``ers_session_name``. + """ + if HandlerType.File in handler_types: + err_msg = "HandlerType.File is not supported by add_handlers_from_types" + raise ValueError(err_msg) + + if HandlerType.Protobufstream in handler_types and not ers_session_name: + err_msg = "ers_session_name is required for HandlerType.Protobufstream" + raise ValueError(err_msg) + + effective_handler_types = set(handler_types) + if HandlerType.Stream in effective_handler_types: + effective_handler_types.update({HandlerType.Lstdout, HandlerType.Lstderr}) + + existing_stream_handlers = { + HandlerType.Lstdout + if _logger_has_handler( + log, logging.StreamHandler, target_stream=cast(io.IOBase, sys.stdout) + ) + else None, + HandlerType.Lstderr + if _logger_has_handler( + log, logging.StreamHandler, target_stream=cast(io.IOBase, sys.stderr) + ) + else None, + } + existing_stream_handlers.discard(None) + + existing_handlers = { + HandlerType.Rich if _logger_has_handler(log, FormattedRichHandler) else None, + HandlerType.Protobufstream + if 
_logger_has_handler(log, ERSKafkaLogHandler) + else None, + HandlerType.Throttle if _logger_has_filter(log, ThrottleFilter) else None, + } + existing_handlers.discard(None) + existing_handlers.update(existing_stream_handlers) + + dispatch = { + HandlerType.Rich: lambda: add_rich_handler(log, True), + HandlerType.Lstdout: lambda: add_stdout_handler(log, True), + HandlerType.Lstderr: lambda: add_stderr_handler(log, True), + HandlerType.Protobufstream: lambda: add_ers_kafka_handler( + log, True, ers_session_name + ), + HandlerType.Throttle: lambda: add_throttle_filter(log) + } + + #! Try to revisit this logic + + install_order = [ + HandlerType.Rich, + HandlerType.Lstdout, + HandlerType.Lstderr, + HandlerType.Protobufstream, + HandlerType.Throttle, + ] + + for handler_type in install_order: + if handler_type not in effective_handler_types: + continue + if handler_type in existing_handlers: + continue + installer = dispatch.get(handler_type) + if installer is None: + continue + installer() + diff --git a/src/daqpytools/logging/logger.py b/src/daqpytools/logging/logger.py index 147f7de..3f0c1fa 100644 --- a/src/daqpytools/logging/logger.py +++ b/src/daqpytools/logging/logger.py @@ -8,7 +8,9 @@ from daqpytools.logging.exceptions import LoggerSetupError from daqpytools.logging.handlers import ( - ThrottleFilter, + LogHandlerConf, + add_handlers_from_types, + add_throttle_filter, add_ers_kafka_handler, add_file_handler, add_rich_handler, @@ -72,7 +74,9 @@ def get_daq_logger( file_handler_path: str | None = None, stream_handlers: bool = False, ers_kafka_handler: str | None = None, - throttle: bool = False + throttle: bool = False, + + setup_ers_handlers: bool = False, ) -> logging.Logger: """C'tor for the default logging instances. @@ -154,7 +158,28 @@ def get_daq_logger( if throttle: # Note: Default parameters used. 
No functionality on customisability yet - logger.addFilter(ThrottleFilter()) + add_throttle_filter(logger) + + + + if setup_ers_handlers: + + # need to grab the list of relevant handlers that exist in ERS + #! This is very dependent on ERS env variables existing!!! + lhc_conf = LogHandlerConf._get_oks_conf() + all_handlers = {handler for handler_conf in lhc_conf.values() for handler in handler_conf.handlers} + + print(all_handlers) + + add_handlers_from_types(logger, all_handlers, ers_kafka_handler) + + # now what.. Well we have a list of handlers to add now huh.. + + + + + + # Set log level for all handlers if requested if log_level is not logging.NOTSET: From 1a6763c522003e967a61483522799095db3ae03b Mon Sep 17 00:00:00 2001 From: Emir Muhammad Date: Wed, 18 Feb 2026 10:10:32 +0100 Subject: [PATCH 04/12] Refactor setup_daq_ers, add new default handlers --- src/daqpytools/logging/handlers.py | 94 ++++++++++++++++-------------- src/daqpytools/logging/logger.py | 34 +++++++---- 2 files changed, 72 insertions(+), 56 deletions(-) diff --git a/src/daqpytools/logging/handlers.py b/src/daqpytools/logging/handlers.py index c8c40e6..fd05453 100644 --- a/src/daqpytools/logging/handlers.py +++ b/src/daqpytools/logging/handlers.py @@ -193,6 +193,7 @@ class ERSPyLogHandlerConf: handlers: list = field(default_factory = lambda: []) protobufconf: ProtobufConf = field(default_factory = lambda: None) +#! TODO/now= consider @dataclass(frozen=True) @dataclass class LogHandlerConf: """Dataclass that holds the various streams and relevant handlers. @@ -344,8 +345,9 @@ class BaseHandlerFilter(logging.Filter): """Base filter that hold the logic on choosing if a handler should emit based on what HandlersTypes are supplied to it. 
""" - def __init__(self) -> None: + def __init__(self, default_case = LogHandlerConf.get_base() ) -> None: """C'tor.""" + self.default_case = default_case super().__init__() def get_allowed(self, record: logging.LogRecord) -> list | None: @@ -370,7 +372,7 @@ def get_allowed(self, record: logging.LogRecord) -> list | None: # Handle the non-ERS case else: - allowed = getattr(record, "handlers", LogHandlerConf.get_base()) + allowed = getattr(record, "handlers", self.default_case) return allowed class HandleIDFilter(BaseHandlerFilter): @@ -378,11 +380,13 @@ class HandleIDFilter(BaseHandlerFilter): if the current handler (defined by the handler_id) is within the set of allowed handlers. """ - def __init__(self, handler_id: HandlerType | list[HandlerType]) -> None: + def __init__(self, handler_id: HandlerType | list[HandlerType], default_case = LogHandlerConf.get_base()) -> None: """Initialises HandleIDFilter with the handler_id, to identify what kind of handler this filter is. """ - super().__init__() + super().__init__( + default_case = default_case + ) # Normalise handler_id to be a set if isinstance(handler_id, list): @@ -560,6 +564,23 @@ def add_throttle_filter(log: logging.Logger) -> None: log.addFilter(ThrottleFilter()) return +def _logger_has_handler( + log: logging.Logger, + handler_type: type[logging.Handler], + target_stream: io.IOBase | None = None, +) -> bool: + """Check if logger already has a matching handler. + + For StreamHandler, ``target_stream`` can be used to distinguish stdout/stderr. 
+ """ + type_matches = [isinstance(handler, handler_type) for handler in log.handlers] + stream_matches = [ + handler.stream is target_stream if target_stream else False + for handler in log.handlers + if isinstance(handler, logging.StreamHandler) + ] + return any(type_matches + stream_matches) + def check_parent_handlers( log: logging.Logger, use_parent_handlers: bool, @@ -588,8 +609,9 @@ def check_parent_handlers( # Check that we are not using the true logging root logger python_root_logger_name = logging.getLogger().name if log.name == python_root_logger_name: - err_nsg = "You should not be interfacing with the root logger" - raise ValueError(err_nsg) + err_msg = "You should not be interfacing with the root logger" + raise ValueError(err_msg) + # Validate the stream handler has a target stream if handler_type.__name__ == "StreamHandler" and target_stream is None: err_msg = ( @@ -607,15 +629,7 @@ def check_parent_handlers( logger_parent = log.parent this_is_root_logger = logger_parent.name == python_root_logger_name while not this_is_root_logger: - handler_checking = [ - isinstance(handler, handler_type) for handler in logger_parent.handlers - ] - stream_handler_checking = [ - handler.stream is target_stream if target_stream else False - for handler in logger_parent.handlers - if isinstance(handler, logging.StreamHandler) - ] - if any(handler_checking + stream_handler_checking): + if _logger_has_handler(logger_parent,handler_type, target_stream): raise LoggerHandlerError(logger_parent.name, handler_type) logger_parent = logger_parent.parent this_is_root_logger = logger_parent.name == python_root_logger_name @@ -638,10 +652,14 @@ def add_rich_handler(log: logging.Logger, use_parent_handlers: bool) -> None: check_parent_handlers(log, use_parent_handlers, FormattedRichHandler) width: int = get_width() handler: RichHandler = FormattedRichHandler(width=width) - handler.addFilter(HandleIDFilter(HandlerType.Rich)) + + #! 
Here you better initialise handlerid filter + my_default_case = {HandlerType.Rich} + my_rich_filter = HandleIDFilter(handler_id=HandlerType.Rich, default_case=my_default_case) # Should accept the base handlers here + handler.addFilter(my_rich_filter) log.addHandler(handler) return - + def add_ers_kafka_handler(log: logging.Logger, use_parent_handlers: bool, session_name:str, topic: str = "ers_stream", address: str ="monkafka.cern.ch:30092") -> None: @@ -676,11 +694,16 @@ def add_stdout_handler(log: logging.Logger, use_parent_handlers: bool) -> None: ) stdout_handler = logging.StreamHandler(sys.stdout) stdout_handler.setFormatter(LoggingFormatter()) - stdout_handler.addFilter(HandleIDFilter([HandlerType.Stream, HandlerType.Lstdout])) + + # repeat ad infinitum for all handlers.... + my_default_case = {HandlerType.Rich} + my_stdout_filter = HandleIDFilter(handler_id=[HandlerType.Stream, HandlerType.Lstdout],default_case=my_default_case) + stdout_handler.addFilter(my_stdout_filter) + log.addHandler(stdout_handler) return - +# Consider seeing if there is a way to generalify the add X handler.. def add_stderr_handler(log: logging.Logger, use_parent_handlers: bool) -> None: """Add a stderr handler to the logger. @@ -734,24 +757,6 @@ def add_file_handler(log: logging.Logger, use_parent_handlers: bool, path: str) return -def _logger_has_handler( - log: logging.Logger, - handler_type: type[logging.Handler], - target_stream: io.IOBase | None = None, -) -> bool: - """Check if logger already has a matching handler. - - For StreamHandler, ``target_stream`` can be used to distinguish stdout/stderr. 
- """ - type_matches = [isinstance(handler, handler_type) for handler in log.handlers] - stream_matches = [ - handler.stream is target_stream if target_stream else False - for handler in log.handlers - if isinstance(handler, logging.StreamHandler) - ] - return any(type_matches + stream_matches) - - def _logger_has_filter(log: logging.Logger, filter_type: type[logging.Filter]) -> bool: """Check if logger already has a matching filter type.""" return any(isinstance(logger_filter, filter_type) for logger_filter in log.filters) @@ -762,7 +767,7 @@ def add_handlers_from_types( handler_types: set[HandlerType], ers_session_name: str | None, ) -> None: - """Add handlers to a logger based on HandlerType values. + """Add handlers to a logger based on a set of HandlerType values. This helper intentionally supports only the default options for now: - ``use_parent_handlers`` is always True. @@ -777,10 +782,12 @@ def add_handlers_from_types( err_msg = "ers_session_name is required for HandlerType.Protobufstream" raise ValueError(err_msg) + # Update relevant handler types that was parsed effective_handler_types = set(handler_types) if HandlerType.Stream in effective_handler_types: effective_handler_types.update({HandlerType.Lstdout, HandlerType.Lstderr}) + # Check if current logger has stream handlers, convert to handlertypes existing_stream_handlers = { HandlerType.Lstdout if _logger_has_handler( @@ -795,6 +802,7 @@ def add_handlers_from_types( } existing_stream_handlers.discard(None) + # Check if current logger has the interested handler existing_handlers = { HandlerType.Rich if _logger_has_handler(log, FormattedRichHandler) else None, HandlerType.Protobufstream @@ -805,7 +813,7 @@ def add_handlers_from_types( existing_handlers.discard(None) existing_handlers.update(existing_stream_handlers) - dispatch = { + handlers_init_map = { HandlerType.Rich: lambda: add_rich_handler(log, True), HandlerType.Lstdout: lambda: add_stdout_handler(log, True), HandlerType.Lstderr: lambda: 
add_stderr_handler(log, True), @@ -815,9 +823,7 @@ def add_handlers_from_types( HandlerType.Throttle: lambda: add_throttle_filter(log) } - #! Try to revisit this logic - - install_order = [ + supported_handers = [ HandlerType.Rich, HandlerType.Lstdout, HandlerType.Lstderr, @@ -825,12 +831,12 @@ def add_handlers_from_types( HandlerType.Throttle, ] - for handler_type in install_order: + for handler_type in supported_handers: if handler_type not in effective_handler_types: continue if handler_type in existing_handlers: continue - installer = dispatch.get(handler_type) + installer = handlers_init_map.get(handler_type) if installer is None: continue installer() diff --git a/src/daqpytools/logging/logger.py b/src/daqpytools/logging/logger.py index 3f0c1fa..9bb5f84 100644 --- a/src/daqpytools/logging/logger.py +++ b/src/daqpytools/logging/logger.py @@ -145,6 +145,10 @@ def get_daq_logger( logger.setLevel(log_level) logger.propagate = use_parent_handlers + #! Okay so before this bit, you capture all the handlers that you want to have + # That would now be the default base handlers + # You apss this in each of the requested handlers here.. + # Add requested handlers if rich_handler: add_rich_handler(logger, use_parent_handlers) @@ -162,18 +166,6 @@ def get_daq_logger( - if setup_ers_handlers: - - # need to grab the list of relevant handlers that exist in ERS - #! This is very dependent on ERS env variables existing!!! - lhc_conf = LogHandlerConf._get_oks_conf() - all_handlers = {handler for handler_conf in lhc_conf.values() for handler in handler_conf.handlers} - - print(all_handlers) - - add_handlers_from_types(logger, all_handlers, ers_kafka_handler) - - # now what.. Well we have a list of handlers to add now huh.. @@ -191,3 +183,21 @@ def get_daq_logger( handler.setLevel(log_level) return logger + + +#! 
This will mean now that you need some function here that will allow you to go through all the handlers and all the filters and update that stupid self.default_case + +def setup_daq_ers_logger(logger, ers_session_name): + + # need to grab the list of relevant handlers that exist in ERS + #! This is very dependent on ERS env variables existing!!! + + all_handlers = {handler for handler_conf in LogHandlerConf._get_oks_conf().values() for handler in handler_conf.handlers} + + print(all_handlers) + + add_handlers_from_types(logger, all_handlers, ers_session_name) + + + # now what.. Well we have a list of handlers to add now huh.. + From 2284b624235a05c9f8de313d42c07190c747ecc3 Mon Sep 17 00:00:00 2001 From: Emir Muhammad Date: Wed, 18 Feb 2026 12:27:04 +0100 Subject: [PATCH 05/12] [TEMP] everything should work now.. --- src/daqpytools/apps/logging_demonstrator.py | 56 ++++++++++-- src/daqpytools/logging/handlers.py | 98 +++++++++++++++------ src/daqpytools/logging/logger.py | 40 ++++++--- 3 files changed, 142 insertions(+), 52 deletions(-) diff --git a/src/daqpytools/apps/logging_demonstrator.py b/src/daqpytools/apps/logging_demonstrator.py index 7f9e4e1..b87c615 100644 --- a/src/daqpytools/apps/logging_demonstrator.py +++ b/src/daqpytools/apps/logging_demonstrator.py @@ -10,9 +10,11 @@ from daqpytools.logging.handlers import ( HandlerType, LogHandlerConf, + add_stdout_handler, ) + from daqpytools.logging.levels import logging_log_level_keys -from daqpytools.logging.logger import get_daq_logger +from daqpytools.logging.logger import get_daq_logger, setup_daq_ers_logger from daqpytools.logging.utils import get_width @@ -402,25 +404,61 @@ def main( """ logger_name = "daqpytools_logging_demonstrator" - os.environ["DUNEDAQ_ERS_WARNING"] = "erstrace,throttle,lstdout" - os.environ["DUNEDAQ_ERS_INFO"] = "lstderr,throttle,lstdout" - os.environ["DUNEDAQ_ERS_FATAL"] = "rich,lstdout" + os.environ["DUNEDAQ_ERS_WARNING"] = "erstrace,throttle,lstderr" + os.environ["DUNEDAQ_ERS_INFO"] 
= "lstderr,throttle,lstderr" + os.environ["DUNEDAQ_ERS_FATAL"] = "lstderr" os.environ["DUNEDAQ_ERS_ERROR"] = ( "erstrace," "throttle," - "lstdout," + "lstderr," "protobufstream(monkafka.cern.ch:30092)" ) + handlerconf = LogHandlerConf(init_ers=True) + main_logger: logging.Logger = get_daq_logger( logger_name=logger_name, log_level=log_level, - rich_handler=False, - setup_ers_handlers=True, - ers_kafka_handler="session_temp" + stream_handlers=False, + rich_handler=True # only rich was defined ) - main_logger.warning("test") + main_logger.warning("Only Rich") + + # add_stdout_handler(main_logger, True) + setup_daq_ers_logger(main_logger, "session_temp") + + main_logger.critical("Should be only rich") + + + # main_logger.critical("test") #use only rich because we only iniitlaise with rich + + main_logger.critical("Stream", extra={"handlers": [HandlerType.Stream]}) + + + + main_logger.critical("ERS (lstderr only)", extra=handlerconf.ERS) + + + + + + # define a default handlerconf so for example + + + """ + Concrete suggestions + + For now: + get_daq_logger = rich_handler = True # save rich_handler and set as base class + + setup_ers(log) #adds stream handler and what have you + + log.warning("something") # only goes to rich because we only initialise with rich + + log.warning("something else", extra= ers) # use whatever is in ers + + """ # main_logger: logging.Logger = get_daq_logger( diff --git a/src/daqpytools/logging/handlers.py b/src/daqpytools/logging/handlers.py index fd05453..67a33de 100644 --- a/src/daqpytools/logging/handlers.py +++ b/src/daqpytools/logging/handlers.py @@ -423,9 +423,13 @@ class ThrottleFilter(BaseHandlerFilter): ... logger.error("Repeated error message") """ - def __init__(self, initial_threshold: int = 30, time_limit: int = 30) -> None: + def __init__(self, default_case=LogHandlerConf.get_base(), initial_threshold: int = 30, time_limit: int = 30) -> None: """C'tor.""" - super().__init__() + #! THERES A BUG HERE.. 
WHERE IF YOU WRONGLY INITIALISE IT ITS NOT GONNA FIRE AT ALL......... + + super().__init__( + default_case = default_case + ) self.initial_threshold = initial_threshold self.time_limit = time_limit self.issue_map: dict[str, IssueRecord] = defaultdict(IssueRecord) @@ -552,7 +556,7 @@ def _format_timestamp(timestamp: float) -> str: return Text(time_str, style="logging.time") -def add_throttle_filter(log: logging.Logger) -> None: +def add_throttle_filter(log: logging.Logger, default_case = {HandlerType.Throttle}) -> None: """Add the Throttle filter to the logger. Args: @@ -561,7 +565,7 @@ def add_throttle_filter(log: logging.Logger) -> None: Returns: None """ - log.addFilter(ThrottleFilter()) + log.addFilter(ThrottleFilter(default_case=default_case)) return def _logger_has_handler( @@ -573,12 +577,32 @@ def _logger_has_handler( For StreamHandler, ``target_stream`` can be used to distinguish stdout/stderr. """ - type_matches = [isinstance(handler, handler_type) for handler in log.handlers] - stream_matches = [ - handler.stream is target_stream if target_stream else False - for handler in log.handlers - if isinstance(handler, logging.StreamHandler) - ] + #EXCEPT STREAM HANDLER YOU FOOL + type_matches = [isinstance(handler, handler_type) for handler in log.handlers if not isinstance(handler, logging.StreamHandler)] + + + stream_matches = [] + + # print(f"{handler_type=},{target_stream = } ") + + for handler in log.handlers: + if isinstance(handler, logging.StreamHandler): + # print(f"{handler=}, {handler.stream=}") + if target_stream: + # print("Target stream") + if handler.stream is target_stream: + # print("dice") + stream_matches.append(True) + else: + # print("no dice") + stream_matches.append(False) + else: + # print("No Stream") + stream_matches.append(False) + + # print(f"{type_matches=}, {stream_matches=}" ) + # print(f"outcome: {any(type_matches + stream_matches)}") + # print("") return any(type_matches + stream_matches) def check_parent_handlers( @@ -636,7 
+660,7 @@ def check_parent_handlers( return -def add_rich_handler(log: logging.Logger, use_parent_handlers: bool) -> None: +def add_rich_handler(log: logging.Logger, use_parent_handlers: bool, default_case={HandlerType.Rich}) -> None: """Add a rich handler to the logger. Args: @@ -654,14 +678,14 @@ def add_rich_handler(log: logging.Logger, use_parent_handlers: bool) -> None: handler: RichHandler = FormattedRichHandler(width=width) #! Here you better initialise handlerid filter - my_default_case = {HandlerType.Rich} - my_rich_filter = HandleIDFilter(handler_id=HandlerType.Rich, default_case=my_default_case) # Should accept the base handlers here - handler.addFilter(my_rich_filter) + + my_filter = HandleIDFilter(handler_id=HandlerType.Rich, default_case=default_case) # Should accept the base handlers here + handler.addFilter(my_filter) log.addHandler(handler) return def add_ers_kafka_handler(log: logging.Logger, use_parent_handlers: bool, - session_name:str, topic: str = "ers_stream", + session_name:str, default_case = {HandlerType.Protobufstream}, topic: str = "ers_stream", address: str ="monkafka.cern.ch:30092") -> None: # TODO/future: topic and address are new, propagate to all relevant implementation """Add an ers protobuf handler to the root logger.""" @@ -670,10 +694,13 @@ def add_ers_kafka_handler(log: logging.Logger, use_parent_handlers: bool, kafka_address = address, kafka_topic = topic ) - handler.addFilter(HandleIDFilter(HandlerType.Protobufstream)) + + + my_filter = HandleIDFilter(HandlerType.Protobufstream, default_case=default_case) + handler.addFilter(my_filter) log.addHandler(handler) -def add_stdout_handler(log: logging.Logger, use_parent_handlers: bool) -> None: +def add_stdout_handler(log: logging.Logger, use_parent_handlers: bool, default_case={HandlerType.Stream, HandlerType.Lstdout}) -> None: """Add a stdout handler to the logger. 
Args: @@ -696,15 +723,14 @@ def add_stdout_handler(log: logging.Logger, use_parent_handlers: bool) -> None: stdout_handler.setFormatter(LoggingFormatter()) # repeat ad infinitum for all handlers.... - my_default_case = {HandlerType.Rich} - my_stdout_filter = HandleIDFilter(handler_id=[HandlerType.Stream, HandlerType.Lstdout],default_case=my_default_case) - stdout_handler.addFilter(my_stdout_filter) + my_filter = HandleIDFilter(handler_id=[HandlerType.Stream, HandlerType.Lstdout],default_case=default_case) + stdout_handler.addFilter(my_filter) log.addHandler(stdout_handler) return # Consider seeing if there is a way to generalify the add X handler.. -def add_stderr_handler(log: logging.Logger, use_parent_handlers: bool) -> None: +def add_stderr_handler(log: logging.Logger, use_parent_handlers: bool, default_case={HandlerType.Lstderr, HandlerType.Stream}) -> None: """Add a stderr handler to the logger. The error is set to the ERROR level, and will only log messages at that level @@ -729,13 +755,16 @@ def add_stderr_handler(log: logging.Logger, use_parent_handlers: bool) -> None: ) stderr_handler = logging.StreamHandler(sys.stderr) stderr_handler.setFormatter(LoggingFormatter()) - stderr_handler.addFilter(HandleIDFilter([HandlerType.Stream, HandlerType.Lstderr])) + + my_filter = HandleIDFilter(handler_id=[HandlerType.Stream, HandlerType.Lstderr],default_case=default_case) + stderr_handler.addFilter(my_filter) + stderr_handler.setLevel(logging.ERROR) log.addHandler(stderr_handler) return -def add_file_handler(log: logging.Logger, use_parent_handlers: bool, path: str) -> None: +def add_file_handler(log: logging.Logger, use_parent_handlers: bool, path: str, default_case={HandlerType.File}) -> None: """Add a file handler to the root logger. 
Args: @@ -752,7 +781,10 @@ def add_file_handler(log: logging.Logger, use_parent_handlers: bool, path: str) check_parent_handlers(log, use_parent_handlers, logging.FileHandler) file_handler = logging.FileHandler(filename=path) file_handler.setFormatter(LoggingFormatter()) - file_handler.addFilter(HandleIDFilter(HandlerType.File)) + + my_filter = HandleIDFilter(HandlerType.File, default_case) + file_handler.addFilter(my_filter) + log.addHandler(file_handler) return @@ -774,6 +806,8 @@ def add_handlers_from_types( - ``HandlerType.File`` is not supported and raises immediately. - ``HandlerType.Protobufstream`` requires ``ers_session_name``. """ + + default_case = {HandlerType.Unknown} if HandlerType.File in handler_types: err_msg = "HandlerType.File is not supported by add_handlers_from_types" raise ValueError(err_msg) @@ -801,6 +835,7 @@ def add_handlers_from_types( else None, } existing_stream_handlers.discard(None) + print(f"{existing_stream_handlers=}") # Check if current logger has the interested handler existing_handlers = { @@ -813,14 +848,17 @@ def add_handlers_from_types( existing_handlers.discard(None) existing_handlers.update(existing_stream_handlers) + # print(f"{existing_handlers=}") + # print(f"{effective_handler_types=}") + handlers_init_map = { - HandlerType.Rich: lambda: add_rich_handler(log, True), - HandlerType.Lstdout: lambda: add_stdout_handler(log, True), - HandlerType.Lstderr: lambda: add_stderr_handler(log, True), + HandlerType.Rich: lambda: add_rich_handler(log, True, default_case), + HandlerType.Lstdout: lambda: add_stdout_handler(log, True, default_case), + HandlerType.Lstderr: lambda: add_stderr_handler(log, True, default_case), HandlerType.Protobufstream: lambda: add_ers_kafka_handler( - log, True, ers_session_name + log, True, ers_session_name, default_case ), - HandlerType.Throttle: lambda: add_throttle_filter(log) + HandlerType.Throttle: lambda: add_throttle_filter(log, {HandlerType.Rich}) } supported_handers = [ @@ -839,5 +877,7 @@ def 
add_handlers_from_types( installer = handlers_init_map.get(handler_type) if installer is None: continue + print(handler_type) + installer() diff --git a/src/daqpytools/logging/logger.py b/src/daqpytools/logging/logger.py index 9bb5f84..4b89186 100644 --- a/src/daqpytools/logging/logger.py +++ b/src/daqpytools/logging/logger.py @@ -16,6 +16,7 @@ add_rich_handler, add_stderr_handler, add_stdout_handler, + HandlerType ) from daqpytools.logging.levels import logging_log_level_to_int from daqpytools.logging.utils import get_width @@ -149,27 +150,38 @@ def get_daq_logger( # That would now be the default base handlers # You apss this in each of the requested handlers here.. + + default_case = {HandlerType.Rich} + # Add requested handlers + # if rich_handler: + # add_rich_handler(logger, use_parent_handlers) + # if file_handler_path: + # add_file_handler(logger, use_parent_handlers, file_handler_path) + # if stream_handlers: + # add_stdout_handler(logger, use_parent_handlers) + # add_stderr_handler(logger, use_parent_handlers) + # if ers_kafka_handler: + # add_ers_kafka_handler(logger, use_parent_handlers, ers_kafka_handler) + + # if throttle: + # # Note: Default parameters used. 
No functionality on customisability yet + # add_throttle_filter(logger) + + if rich_handler: - add_rich_handler(logger, use_parent_handlers) + add_rich_handler(logger, use_parent_handlers, default_case) if file_handler_path: - add_file_handler(logger, use_parent_handlers, file_handler_path) + add_file_handler(logger, use_parent_handlers, file_handler_path, default_case) if stream_handlers: - add_stdout_handler(logger, use_parent_handlers) - add_stderr_handler(logger, use_parent_handlers) + add_stdout_handler(logger, use_parent_handlers, default_case) + add_stderr_handler(logger, use_parent_handlers, default_case) if ers_kafka_handler: - add_ers_kafka_handler(logger, use_parent_handlers, ers_kafka_handler) + add_ers_kafka_handler(logger, use_parent_handlers, ers_kafka_handler, default_case) if throttle: # Note: Default parameters used. No functionality on customisability yet - add_throttle_filter(logger) - - - - - - - + add_throttle_filter(logger, default_case) @@ -194,7 +206,7 @@ def setup_daq_ers_logger(logger, ers_session_name): all_handlers = {handler for handler_conf in LogHandlerConf._get_oks_conf().values() for handler in handler_conf.handlers} - print(all_handlers) + print(f"{all_handlers=}") add_handlers_from_types(logger, all_handlers, ers_session_name) From 4ac129650f22afdf0313ee5fa46eae007e3dab81 Mon Sep 17 00:00:00 2001 From: Emir Muhammad Date: Wed, 18 Feb 2026 12:42:33 +0100 Subject: [PATCH 06/12] [squash with above] Fix implementation of default case --- src/daqpytools/logging/handlers.py | 89 +++++++++++++----------------- 1 file changed, 38 insertions(+), 51 deletions(-) diff --git a/src/daqpytools/logging/handlers.py b/src/daqpytools/logging/handlers.py index 67a33de..4b80972 100644 --- a/src/daqpytools/logging/handlers.py +++ b/src/daqpytools/logging/handlers.py @@ -276,7 +276,6 @@ def _convert_str_to_handlertype(handler_str: str) -> tuple[HandlerType, converts "protobufstream(url:port)" to return both the HandlerType and the protobuf 
configuration """ - # print(f"{handler_str=}") if "erstrace" in handler_str: msg = ( "ERSTrace is a C++ implementation, " @@ -299,12 +298,10 @@ def _make_ers_handler_conf(ers_log_level :str) -> ERSPyLogHandlerConf: """Generates the ERSPyLogHandlerConf from reading an environment variable.""" erspyloghandlerconf = ERSPyLogHandlerConf() envvalue = os.getenv(ers_log_level) - # print(f"{envvalue=}") if envvalue is None: raise ERSEnvError(ers_log_level) for h in envvalue.split(","): - # print(f"{h=}") handlertype, kafkaconf = LogHandlerConf._convert_str_to_handlertype(h) erspyloghandlerconf.handlers.append(handlertype) if kafkaconf: @@ -425,8 +422,6 @@ class ThrottleFilter(BaseHandlerFilter): def __init__(self, default_case=LogHandlerConf.get_base(), initial_threshold: int = 30, time_limit: int = 30) -> None: """C'tor.""" - #! THERES A BUG HERE.. WHERE IF YOU WRONGLY INITIALISE IT ITS NOT GONNA FIRE AT ALL......... - super().__init__( default_case = default_case ) @@ -577,32 +572,14 @@ def _logger_has_handler( For StreamHandler, ``target_stream`` can be used to distinguish stdout/stderr. 
""" - #EXCEPT STREAM HANDLER YOU FOOL + type_matches = [isinstance(handler, handler_type) for handler in log.handlers if not isinstance(handler, logging.StreamHandler)] - - stream_matches = [] - - # print(f"{handler_type=},{target_stream = } ") - - for handler in log.handlers: - if isinstance(handler, logging.StreamHandler): - # print(f"{handler=}, {handler.stream=}") - if target_stream: - # print("Target stream") - if handler.stream is target_stream: - # print("dice") - stream_matches.append(True) - else: - # print("no dice") - stream_matches.append(False) - else: - # print("No Stream") - stream_matches.append(False) - - # print(f"{type_matches=}, {stream_matches=}" ) - # print(f"outcome: {any(type_matches + stream_matches)}") - # print("") + stream_matches = [ + handler.stream is target_stream if target_stream else False + for handler in log.handlers + if isinstance(handler, logging.StreamHandler) + ] return any(type_matches + stream_matches) def check_parent_handlers( @@ -635,7 +612,6 @@ def check_parent_handlers( if log.name == python_root_logger_name: err_msg = "You should not be interfacing with the root logger" raise ValueError(err_msg) - # Validate the stream handler has a target stream if handler_type.__name__ == "StreamHandler" and target_stream is None: err_msg = ( @@ -675,12 +651,14 @@ def add_rich_handler(log: logging.Logger, use_parent_handlers: bool, default_cas """ check_parent_handlers(log, use_parent_handlers, FormattedRichHandler) width: int = get_width() - handler: RichHandler = FormattedRichHandler(width=width) - - #! 
Here you better initialise handlerid filter + handler: RichHandler = FormattedRichHandler(width=width) - my_filter = HandleIDFilter(handler_id=HandlerType.Rich, default_case=default_case) # Should accept the base handlers here - handler.addFilter(my_filter) + handler.addFilter( + HandleIDFilter( + handler_id=HandlerType.Rich, + default_case=default_case + ) + ) log.addHandler(handler) return @@ -694,10 +672,13 @@ def add_ers_kafka_handler(log: logging.Logger, use_parent_handlers: bool, kafka_address = address, kafka_topic = topic ) - - - my_filter = HandleIDFilter(HandlerType.Protobufstream, default_case=default_case) - handler.addFilter(my_filter) + + handler.addFilter( + HandleIDFilter( + handler_id=HandlerType.Protobufstream, + default_case=default_case + ) + ) log.addHandler(handler) def add_stdout_handler(log: logging.Logger, use_parent_handlers: bool, default_case={HandlerType.Stream, HandlerType.Lstdout}) -> None: @@ -722,10 +703,12 @@ def add_stdout_handler(log: logging.Logger, use_parent_handlers: bool, default_c stdout_handler = logging.StreamHandler(sys.stdout) stdout_handler.setFormatter(LoggingFormatter()) - # repeat ad infinitum for all handlers.... 
- my_filter = HandleIDFilter(handler_id=[HandlerType.Stream, HandlerType.Lstdout],default_case=default_case) - stdout_handler.addFilter(my_filter) - + stdout_handler.addFilter( + HandleIDFilter( + handler_id=[HandlerType.Stream, HandlerType.Lstdout], + default_case=default_case + ) + ) log.addHandler(stdout_handler) return @@ -755,10 +738,12 @@ def add_stderr_handler(log: logging.Logger, use_parent_handlers: bool, default_c ) stderr_handler = logging.StreamHandler(sys.stderr) stderr_handler.setFormatter(LoggingFormatter()) - - my_filter = HandleIDFilter(handler_id=[HandlerType.Stream, HandlerType.Lstderr],default_case=default_case) - stderr_handler.addFilter(my_filter) - + stderr_handler.addFilter( + HandleIDFilter( + handler_id=[HandlerType.Stream, HandlerType.Lstderr], + default_case=default_case + ) + ) stderr_handler.setLevel(logging.ERROR) log.addHandler(stderr_handler) return @@ -781,10 +766,12 @@ def add_file_handler(log: logging.Logger, use_parent_handlers: bool, path: str, check_parent_handlers(log, use_parent_handlers, logging.FileHandler) file_handler = logging.FileHandler(filename=path) file_handler.setFormatter(LoggingFormatter()) - - my_filter = HandleIDFilter(HandlerType.File, default_case) - file_handler.addFilter(my_filter) - + file_handler.addFilter( + HandleIDFilter( + handler_id=HandlerType.File, + default_case=default_case + ) + ) log.addHandler(file_handler) return From bbec90a074c5c16270088bb0cf9f80a35a99eb4e Mon Sep 17 00:00:00 2001 From: Emir Muhammad Date: Wed, 18 Feb 2026 17:38:17 +0100 Subject: [PATCH 07/12] Cleanup to working order --- src/daqpytools/logging/handlers.py | 133 ++++++++++++++++++++++------- src/daqpytools/logging/logger.py | 99 ++++++++++----------- 2 files changed, 144 insertions(+), 88 deletions(-) diff --git a/src/daqpytools/logging/handlers.py b/src/daqpytools/logging/handlers.py index 4b80972..93a7006 100644 --- a/src/daqpytools/logging/handlers.py +++ b/src/daqpytools/logging/handlers.py @@ -8,6 +8,7 @@ import sys 
import time from collections import defaultdict +from collections.abc import Callable from dataclasses import dataclass, field from datetime import datetime from enum import Enum @@ -193,7 +194,6 @@ class ERSPyLogHandlerConf: handlers: list = field(default_factory = lambda: []) protobufconf: ProtobufConf = field(default_factory = lambda: None) -#! TODO/now= consider @dataclass(frozen=True) @dataclass class LogHandlerConf: """Dataclass that holds the various streams and relevant handlers. @@ -342,8 +342,10 @@ class BaseHandlerFilter(logging.Filter): """Base filter that hold the logic on choosing if a handler should emit based on what HandlersTypes are supplied to it. """ - def __init__(self, default_case = LogHandlerConf.get_base() ) -> None: + def __init__(self, default_case: set[HandlerType] | None = None) -> None: """C'tor.""" + if default_case is None: + default_case = set(LogHandlerConf.get_base()) self.default_case = default_case super().__init__() @@ -377,7 +379,11 @@ class HandleIDFilter(BaseHandlerFilter): if the current handler (defined by the handler_id) is within the set of allowed handlers. """ - def __init__(self, handler_id: HandlerType | list[HandlerType], default_case = LogHandlerConf.get_base()) -> None: + def __init__( + self, + handler_id: HandlerType | list[HandlerType], + default_case: set[HandlerType] | None = None, + ) -> None: """Initialises HandleIDFilter with the handler_id, to identify what kind of handler this filter is. """ @@ -420,7 +426,12 @@ class ThrottleFilter(BaseHandlerFilter): ... 
logger.error("Repeated error message") """ - def __init__(self, default_case=LogHandlerConf.get_base(), initial_threshold: int = 30, time_limit: int = 30) -> None: + def __init__( + self, + default_case: set[HandlerType] | None = None, + initial_threshold: int = 30, + time_limit: int = 30, + ) -> None: """C'tor.""" super().__init__( default_case = default_case @@ -551,15 +562,22 @@ def _format_timestamp(timestamp: float) -> str: return Text(time_str, style="logging.time") -def add_throttle_filter(log: logging.Logger, default_case = {HandlerType.Throttle}) -> None: +def add_throttle_filter( + log: logging.Logger, + default_case: set[HandlerType] | None = None, +) -> None: """Add the Throttle filter to the logger. Args: log (logging.Logger): Logger to add the rich handler to. + default_case (set[HandlerType] | None): Default handler set used when + records do not explicitly include handler routing. Returns: None """ + if default_case is None: + default_case = {HandlerType.Throttle} log.addFilter(ThrottleFilter(default_case=default_case)) return @@ -572,8 +590,11 @@ def _logger_has_handler( For StreamHandler, ``target_stream`` can be used to distinguish stdout/stderr. """ - - type_matches = [isinstance(handler, handler_type) for handler in log.handlers if not isinstance(handler, logging.StreamHandler)] + type_matches = [ + isinstance(handler, handler_type) + for handler in log.handlers + if not isinstance(handler, logging.StreamHandler) + ] stream_matches = [ handler.stream is target_stream if target_stream else False @@ -636,12 +657,18 @@ def check_parent_handlers( return -def add_rich_handler(log: logging.Logger, use_parent_handlers: bool, default_case={HandlerType.Rich}) -> None: +def add_rich_handler( + log: logging.Logger, + use_parent_handlers: bool, + default_case: set[HandlerType] | None = None, +) -> None: """Add a rich handler to the logger. Args: log (logging.Logger): Logger to add the rich handler to. 
use_parent_handlers (bool): Whether to check parent handlers. + default_case (set[HandlerType] | None): Default handler set used when + records do not explicitly include handler routing. Returns: None @@ -649,6 +676,8 @@ def add_rich_handler(log: logging.Logger, use_parent_handlers: bool, default_cas Raises: LoggerHandlerError: If a parent logger has a rich handler. """ + if default_case is None: + default_case = {HandlerType.Rich} check_parent_handlers(log, use_parent_handlers, FormattedRichHandler) width: int = get_width() handler: RichHandler = FormattedRichHandler(width=width) @@ -662,11 +691,18 @@ def add_rich_handler(log: logging.Logger, use_parent_handlers: bool, default_cas log.addHandler(handler) return -def add_ers_kafka_handler(log: logging.Logger, use_parent_handlers: bool, - session_name:str, default_case = {HandlerType.Protobufstream}, topic: str = "ers_stream", - address: str ="monkafka.cern.ch:30092") -> None: +def add_ers_kafka_handler( + log: logging.Logger, + use_parent_handlers: bool, + session_name: str, + default_case: set[HandlerType] | None = None, + topic: str = "ers_stream", + address: str = "monkafka.cern.ch:30092", +) -> None: # TODO/future: topic and address are new, propagate to all relevant implementation """Add an ers protobuf handler to the root logger.""" + if default_case is None: + default_case = {HandlerType.Protobufstream} check_parent_handlers(log, use_parent_handlers, ERSKafkaLogHandler) handler: ERSKafkaLogHandler = ERSKafkaLogHandler(session=session_name, kafka_address = address, @@ -681,12 +717,18 @@ def add_ers_kafka_handler(log: logging.Logger, use_parent_handlers: bool, ) log.addHandler(handler) -def add_stdout_handler(log: logging.Logger, use_parent_handlers: bool, default_case={HandlerType.Stream, HandlerType.Lstdout}) -> None: +def add_stdout_handler( + log: logging.Logger, + use_parent_handlers: bool, + default_case: set[HandlerType] | None = None, +) -> None: """Add a stdout handler to the logger. 
Args: log (logging.Logger): Logger to add the stdout handler to. use_parent_handlers (bool): Whether to check parent handlers. + default_case (set[HandlerType] | None): Default handler set used when + records do not explicitly include handler routing. Returns: None @@ -694,6 +736,8 @@ def add_stdout_handler(log: logging.Logger, use_parent_handlers: bool, default_c Raises: LoggerHandlerError: If a parent logger has a stdout handler. """ + if default_case is None: + default_case = {HandlerType.Stream, HandlerType.Lstdout} check_parent_handlers( log, use_parent_handlers, @@ -712,8 +756,11 @@ def add_stdout_handler(log: logging.Logger, use_parent_handlers: bool, default_c log.addHandler(stdout_handler) return -# Consider seeing if there is a way to generalify the add X handler.. -def add_stderr_handler(log: logging.Logger, use_parent_handlers: bool, default_case={HandlerType.Lstderr, HandlerType.Stream}) -> None: +def add_stderr_handler( + log: logging.Logger, + use_parent_handlers: bool, + default_case: set[HandlerType] | None = None, +) -> None: """Add a stderr handler to the logger. The error is set to the ERROR level, and will only log messages at that level @@ -723,6 +770,8 @@ def add_stderr_handler(log: logging.Logger, use_parent_handlers: bool, default_c Args: log (logging.Logger): Logger to add the stderr handler to. use_parent_handlers (bool): Whether to check parent handlers. + default_case (set[HandlerType] | None): Default handler set used when + records do not explicitly include handler routing. Returns: None @@ -730,6 +779,8 @@ def add_stderr_handler(log: logging.Logger, use_parent_handlers: bool, default_c Raises: LoggerHandlerError: If a parent logger has a stderr handler. 
""" + if default_case is None: + default_case = {HandlerType.Lstderr, HandlerType.Stream} check_parent_handlers( log, use_parent_handlers, @@ -749,13 +800,20 @@ def add_stderr_handler(log: logging.Logger, use_parent_handlers: bool, default_c return -def add_file_handler(log: logging.Logger, use_parent_handlers: bool, path: str, default_case={HandlerType.File}) -> None: +def add_file_handler( + log: logging.Logger, + use_parent_handlers: bool, + path: str, + default_case: set[HandlerType] | None = None, +) -> None: """Add a file handler to the root logger. Args: log (logging.Logger): Logger to add the file handler to. use_parent_handlers (bool): Whether to check parent handlers. path (str): Path to the log file. + default_case (set[HandlerType] | None): Default handler set used when + records do not explicitly include handler routing. Returns: None @@ -763,6 +821,8 @@ def add_file_handler(log: logging.Logger, use_parent_handlers: bool, path: str, Raises: LoggerHandlerError: If a parent logger has a file handler. """ + if default_case is None: + default_case = {HandlerType.File} check_parent_handlers(log, use_parent_handlers, logging.FileHandler) file_handler = logging.FileHandler(filename=path) file_handler.setFormatter(LoggingFormatter()) @@ -784,7 +844,10 @@ def _logger_has_filter(log: logging.Logger, filter_type: type[logging.Filter]) - def add_handlers_from_types( log: logging.Logger, handler_types: set[HandlerType], + use_parent_handlers: bool, + file_name: str | None, ers_session_name: str | None, + default_case: set[HandlerType], ) -> None: """Add handlers to a logger based on a set of HandlerType values. @@ -793,15 +856,13 @@ def add_handlers_from_types( - ``HandlerType.File`` is not supported and raises immediately. - ``HandlerType.Protobufstream`` requires ``ers_session_name``. 
""" - - default_case = {HandlerType.Unknown} - if HandlerType.File in handler_types: - err_msg = "HandlerType.File is not supported by add_handlers_from_types" - raise ValueError(err_msg) - if HandlerType.Protobufstream in handler_types and not ers_session_name: err_msg = "ers_session_name is required for HandlerType.Protobufstream" raise ValueError(err_msg) + + if HandlerType.File in handler_types and not file_name: + err_msg = "file_name is required for HandlerType.File" + raise ValueError(err_msg) # Update relevant handler types that was parsed effective_handler_types = set(handler_types) @@ -822,7 +883,6 @@ def add_handlers_from_types( else None, } existing_stream_handlers.discard(None) - print(f"{existing_stream_handlers=}") # Check if current logger has the interested handler existing_handlers = { @@ -831,29 +891,38 @@ def add_handlers_from_types( if _logger_has_handler(log, ERSKafkaLogHandler) else None, HandlerType.Throttle if _logger_has_filter(log, ThrottleFilter) else None, + HandlerType.File if _logger_has_filter(log, logging.FileHandler) else None, } existing_handlers.discard(None) existing_handlers.update(existing_stream_handlers) - # print(f"{existing_handlers=}") - # print(f"{effective_handler_types=}") - - handlers_init_map = { - HandlerType.Rich: lambda: add_rich_handler(log, True, default_case), - HandlerType.Lstdout: lambda: add_stdout_handler(log, True, default_case), - HandlerType.Lstderr: lambda: add_stderr_handler(log, True, default_case), + handlers_init_map: dict[HandlerType, Callable[[], None]] = { + HandlerType.Rich: lambda: add_rich_handler( + log, use_parent_handlers, default_case + ), + HandlerType.Lstdout: lambda: add_stdout_handler( + log, use_parent_handlers, default_case + ), + HandlerType.Lstderr: lambda: add_stderr_handler( + log, use_parent_handlers, default_case + ), + HandlerType.File: lambda: add_file_handler( + log, use_parent_handlers, file_name, default_case + ), HandlerType.Protobufstream: lambda: add_ers_kafka_handler( 
- log, True, ers_session_name, default_case + log, use_parent_handlers, ers_session_name, default_case ), - HandlerType.Throttle: lambda: add_throttle_filter(log, {HandlerType.Rich}) + HandlerType.Throttle: lambda: add_throttle_filter(log, default_case), } + # Need to clean this bit up supported_handers = [ HandlerType.Rich, HandlerType.Lstdout, HandlerType.Lstderr, HandlerType.Protobufstream, HandlerType.Throttle, + HandlerType.File, ] for handler_type in supported_handers: @@ -864,7 +933,5 @@ def add_handlers_from_types( installer = handlers_init_map.get(handler_type) if installer is None: continue - print(handler_type) - installer() diff --git a/src/daqpytools/logging/logger.py b/src/daqpytools/logging/logger.py index 4b89186..4755de9 100644 --- a/src/daqpytools/logging/logger.py +++ b/src/daqpytools/logging/logger.py @@ -8,15 +8,9 @@ from daqpytools.logging.exceptions import LoggerSetupError from daqpytools.logging.handlers import ( + HandlerType, LogHandlerConf, add_handlers_from_types, - add_throttle_filter, - add_ers_kafka_handler, - add_file_handler, - add_rich_handler, - add_stderr_handler, - add_stdout_handler, - HandlerType ) from daqpytools.logging.levels import logging_log_level_to_int from daqpytools.logging.utils import get_width @@ -76,8 +70,6 @@ def get_daq_logger( stream_handlers: bool = False, ers_kafka_handler: str | None = None, throttle: bool = False, - - setup_ers_handlers: bool = False, ) -> logging.Logger: """C'tor for the default logging instances. @@ -89,8 +81,9 @@ def get_daq_logger( file_handler_path (str | None): Path to the file handler log file. If None, no file handler is added. stream_handlers (bool): Whether to add both stdout and stderr stream handlers. - ers_kafka_handler (str): Whether to add an ERS protobuf handler. str is session name - throttle (bool): Whether to add the throttle filter or not. Note, does not mean + ers_kafka_handler (str | None): ERS session name used to add an ERS + protobuf handler. 
If None, no ERS protobuf handler is added. + throttle (bool): Whether to add the throttle filter or not. Note, does not mean outputs are filtered by default! See ThrottleFilter for details. Returns: @@ -146,42 +139,26 @@ def get_daq_logger( logger.setLevel(log_level) logger.propagate = use_parent_handlers - #! Okay so before this bit, you capture all the handlers that you want to have - # That would now be the default base handlers - # You apss this in each of the requested handlers here.. - - - default_case = {HandlerType.Rich} - - # Add requested handlers - # if rich_handler: - # add_rich_handler(logger, use_parent_handlers) - # if file_handler_path: - # add_file_handler(logger, use_parent_handlers, file_handler_path) - # if stream_handlers: - # add_stdout_handler(logger, use_parent_handlers) - # add_stderr_handler(logger, use_parent_handlers) - # if ers_kafka_handler: - # add_ers_kafka_handler(logger, use_parent_handlers, ers_kafka_handler) - - # if throttle: - # # Note: Default parameters used. No functionality on customisability yet - # add_throttle_filter(logger) - - + default_case: set[HandlerType] = set() if rich_handler: - add_rich_handler(logger, use_parent_handlers, default_case) + default_case.add(HandlerType.Rich) if file_handler_path: - add_file_handler(logger, use_parent_handlers, file_handler_path, default_case) + default_case.add(HandlerType.File) if stream_handlers: - add_stdout_handler(logger, use_parent_handlers, default_case) - add_stderr_handler(logger, use_parent_handlers, default_case) + default_case.add(HandlerType.Stream) if ers_kafka_handler: - add_ers_kafka_handler(logger, use_parent_handlers, ers_kafka_handler, default_case) - + default_case.add(HandlerType.Protobufstream) if throttle: - # Note: Default parameters used. 
No functionality on customisability yet - add_throttle_filter(logger, default_case) + default_case.add(HandlerType.Throttle) + + add_handlers_from_types( + logger, + default_case, + use_parent_handlers, + file_handler_path, + ers_kafka_handler, + default_case, + ) @@ -197,19 +174,31 @@ def get_daq_logger( return logger -#! This will mean now that you need some function here that will allow you to go through all the handlers and all the filters and update that stupid self.default_case - -def setup_daq_ers_logger(logger, ers_session_name): - - # need to grab the list of relevant handlers that exist in ERS - #! This is very dependent on ERS env variables existing!!! - - all_handlers = {handler for handler_conf in LogHandlerConf._get_oks_conf().values() for handler in handler_conf.handlers} - - print(f"{all_handlers=}") - - add_handlers_from_types(logger, all_handlers, ers_session_name) +def setup_daq_ers_logger( + logger: logging.Logger, + ers_session_name: str, +) -> None: + """Configure logger handlers from ERS environment-derived configuration. + Args: + logger (logging.Logger): Logger to configure. + ers_session_name (str): ERS session name used for protobufstream handler. - # now what.. Well we have a list of handlers to add now huh.. 
+ Returns: + None + """ + all_handlers = { + handler + for handler_conf in LogHandlerConf._get_oks_conf().values() + for handler in handler_conf.handlers + } + + add_handlers_from_types( + logger, + all_handlers, + use_parent_handlers=True, + file_name=None, + ers_session_name=ers_session_name, + default_case={HandlerType.Unknown}, + ) From 11a955ba2899ccf02ddeb100d9f427b2e7dad45a Mon Sep 17 00:00:00 2001 From: Emir Muhammad Date: Thu, 19 Feb 2026 11:01:24 +0100 Subject: [PATCH 08/12] update logging demonstrator and namings --- src/daqpytools/apps/logging_demonstrator.py | 200 +++++++++++--------- src/daqpytools/logging/handlers.py | 147 +++++++------- src/daqpytools/logging/logger.py | 18 +- 3 files changed, 192 insertions(+), 173 deletions(-) diff --git a/src/daqpytools/apps/logging_demonstrator.py b/src/daqpytools/apps/logging_demonstrator.py index b87c615..15fd72f 100644 --- a/src/daqpytools/apps/logging_demonstrator.py +++ b/src/daqpytools/apps/logging_demonstrator.py @@ -10,9 +10,9 @@ from daqpytools.logging.handlers import ( HandlerType, LogHandlerConf, + add_stderr_handler, add_stdout_handler, ) - from daqpytools.logging.levels import logging_log_level_keys from daqpytools.logging.logger import get_daq_logger, setup_daq_ers_logger from daqpytools.logging.utils import get_width @@ -227,6 +227,65 @@ def test_handlerconf(main_logger: logging.Logger) -> None: extra=handlerconf.ERS ) + +def test_fallback_handlers(log_level: str) -> None: + """Demonstrate fallback handler behavior for a logger. + + Args: + log_level (str): Log level used to initialize the demo logger. 
+ + Returns: + None + """ + fallback_log: logging.Logger = get_daq_logger( + logger_name="fallback_logger", + log_level=log_level, + stream_handlers=False, + rich_handler=True, + ) + + fallback_log.info("Rich Only") + + add_stdout_handler(fallback_log, True) + add_stderr_handler(fallback_log, True, {HandlerType.Unknown}) + + fallback_log.critical("Rich + stdout only") + fallback_log.critical( + "Rich + stdout + stderr", + extra={"handlers": [HandlerType.Rich, HandlerType.Stream]}, + ) + + +def test_ers_handler_configuration(log_level: str) -> None: + """Demonstrate ERS-driven handler configuration for a logger. + + Args: + log_level (str): Log level used to initialize the demo logger. + + Returns: + None + """ + # Injecting specific + os.environ["DUNEDAQ_ERS_WARNING"] = "lstdout" + os.environ["DUNEDAQ_ERS_INFO"] = "rich" + os.environ["DUNEDAQ_ERS_FATAL"] = "lstderr,rich" + os.environ["DUNEDAQ_ERS_ERROR"] = "rich" + + ers_logger: logging.Logger = get_daq_logger( + logger_name="ers_logger", + log_level=log_level, + stream_handlers=False, + rich_handler=False, + ) + # Sets up the logger with all the relevant handlers + setup_daq_ers_logger(ers_logger, "session_temp") + + ers_hc = LogHandlerConf(init_ers=True) + ers_logger.info("ERS Info rich ", extra=ers_hc.ERS) + ers_logger.warning("ERS error lstdout", extra=ers_hc.ERS) + ers_logger.critical("ERS critical lstderr + rich", extra=ers_hc.ERS) + + class AllOptionsCommand(click.Command): """Parse the arguments passed and validate they are acceptable, otherwise print the relevant options. @@ -296,11 +355,19 @@ def parse_args(self, ctx: click.Context, args: list[str]) -> None: ), ) @click.option( - "-e", + "-ep", "--ersprotobufstream", type=str, help=( - "Set up an ERS handler, and publish to ERS" + "Set up an ERS protobuf handler, and publish to ERS via protobuf." + ) + ) +@click.option( + "-eh", + "--ershandlers", + is_flag=True, + help=( + "Demonstrate automatic logger configuration with ers variables." 
) ) @click.option( @@ -359,6 +426,14 @@ def parse_args(self, ctx: click.Context, args: list[str]) -> None: "logger handlers assigned to the given logger instance" ), ) +@click.option( + "-fh", + "--fallback-handlers", + is_flag=True, + help=( + "If true, demonstrates the use of fallback handlers." + ), +) def main( log_level: str, rich_handler: bool, @@ -370,7 +445,10 @@ def main( handlertypes:bool, handlerconf:bool, throttle: bool, - suppress_basic: bool + suppress_basic: bool, + fallback_handlers: bool, + ershandlers: bool, + ) -> None: """Demonstrate use of the daq_logging class with daqpyutils_logging_demonstrator. Note - if you are seeing output logs without any explicit handlers assigned, this is @@ -386,7 +464,8 @@ def main( disable_logger_inheritance (bool): If true, disable logger inheritance so each logger instance only uses the logger handlers assigned to the given logger instance. - ersprotobufstream (str): Sets up an ERS protobuf handler with supplied session name. Error msg + ersprotobufstream (str): Sets up an ERS protobuf handler with supplied + session name. Error msg are demonstrated in the HandlerType demonstration, requiring handlerconf to be set to true. The topic for these tests is session_tester. handlertypes (bool): If true, demonstrates the advanced feature of HandlerTypes. @@ -395,6 +474,8 @@ def main( throttle (bool): If true, demonstrates the throttling feature. Requires Rich. suppress_basic (bool): If true, supresses basic functionality. Useful to only test the advanced features of logging + fallback_handlers (bool): If true, demonstrates fallback handler behavior. + ershandlers (bool): If true, demonstrates ERS-based handler setup. Returns: None @@ -403,95 +484,40 @@ def main( LoggerSetupError: If no handlers are set up for the logger. 
""" logger_name = "daqpytools_logging_demonstrator" - - os.environ["DUNEDAQ_ERS_WARNING"] = "erstrace,throttle,lstderr" - os.environ["DUNEDAQ_ERS_INFO"] = "lstderr,throttle,lstderr" - os.environ["DUNEDAQ_ERS_FATAL"] = "lstderr" - os.environ["DUNEDAQ_ERS_ERROR"] = ( - "erstrace," - "throttle," - "lstderr," - "protobufstream(monkafka.cern.ch:30092)" - ) - - handlerconf = LogHandlerConf(init_ers=True) - main_logger: logging.Logger = get_daq_logger( logger_name=logger_name, log_level=log_level, - stream_handlers=False, - rich_handler=True # only rich was defined + use_parent_handlers=not disable_logger_inheritance, + rich_handler=rich_handler, + file_handler_path=file_handler_path, + stream_handlers=stream_handlers, + ers_kafka_handler=ersprotobufstream, + throttle=throttle ) - main_logger.warning("Only Rich") - - # add_stdout_handler(main_logger, True) - setup_daq_ers_logger(main_logger, "session_temp") - - main_logger.critical("Should be only rich") - - - # main_logger.critical("test") #use only rich because we only iniitlaise with rich - - main_logger.critical("Stream", extra={"handlers": [HandlerType.Stream]}) - - + if not suppress_basic: + test_main_functions(main_logger) - main_logger.critical("ERS (lstderr only)", extra=handlerconf.ERS) - - - - - - # define a default handlerconf so for example - - - """ - Concrete suggestions - - For now: - get_daq_logger = rich_handler = True # save rich_handler and set as base class - - setup_ers(log) #adds stream handler and what have you - - log.warning("something") # only goes to rich because we only initialise with rich - - log.warning("something else", extra= ers) # use whatever is in ers - - """ - - - # main_logger: logging.Logger = get_daq_logger( - # logger_name=logger_name, - # log_level=log_level, - # use_parent_handlers=not disable_logger_inheritance, - # rich_handler=rich_handler, - # file_handler_path=file_handler_path, - # stream_handlers=stream_handlers, - # ers_kafka_handler=ersprotobufstream, - # 
throttle=throttle - # ) - - # if not suppress_basic: - # test_main_functions(main_logger) - - # if child_logger: - # test_child_logger( - # logger_name, - # log_level, - # disable_logger_inheritance, - # rich_handler, - # file_handler_path, - # stream_handlers - # ) - - # if throttle: - # test_throttle(main_logger) - # if handlertypes: - # test_handlertypes(main_logger) - # if handlerconf: - # test_handlerconf(main_logger) + if child_logger: + test_child_logger( + logger_name, + log_level, + disable_logger_inheritance, + rich_handler, + file_handler_path, + stream_handlers + ) + if throttle: + test_throttle(main_logger) + if handlertypes: + test_handlertypes(main_logger) + if handlerconf: + test_handlerconf(main_logger) + if fallback_handlers: + test_fallback_handlers(log_level) + if ershandlers: + test_ers_handler_configuration(log_level) if __name__ == "__main__": main() diff --git a/src/daqpytools/logging/handlers.py b/src/daqpytools/logging/handlers.py index 93a7006..dfd3bdf 100644 --- a/src/daqpytools/logging/handlers.py +++ b/src/daqpytools/logging/handlers.py @@ -337,16 +337,15 @@ def reset(self) -> None: self.suppressed_counter: int = 0 self.last_occurrence_formatted: str = "" - class BaseHandlerFilter(logging.Filter): """Base filter that hold the logic on choosing if a handler should emit based on what HandlersTypes are supplied to it. 
""" - def __init__(self, default_case: set[HandlerType] | None = None) -> None: + def __init__(self, fallback_handlers: set[HandlerType] | None = None) -> None: """C'tor.""" - if default_case is None: - default_case = set(LogHandlerConf.get_base()) - self.default_case = default_case + if fallback_handlers is None: + fallback_handlers = set(LogHandlerConf.get_base()) + self.fallback_handlers = fallback_handlers super().__init__() def get_allowed(self, record: logging.LogRecord) -> list | None: @@ -371,7 +370,7 @@ def get_allowed(self, record: logging.LogRecord) -> list | None: # Handle the non-ERS case else: - allowed = getattr(record, "handlers", self.default_case) + allowed = getattr(record, "handlers", self.fallback_handlers) return allowed class HandleIDFilter(BaseHandlerFilter): @@ -382,13 +381,13 @@ class HandleIDFilter(BaseHandlerFilter): def __init__( self, handler_id: HandlerType | list[HandlerType], - default_case: set[HandlerType] | None = None, + fallback_handlers: set[HandlerType] | None = None, ) -> None: """Initialises HandleIDFilter with the handler_id, to identify what kind of handler this filter is. 
""" super().__init__( - default_case = default_case + fallback_handlers = fallback_handlers ) # Normalise handler_id to be a set @@ -428,13 +427,13 @@ class ThrottleFilter(BaseHandlerFilter): def __init__( self, - default_case: set[HandlerType] | None = None, + fallback_handlers: set[HandlerType] | None = None, initial_threshold: int = 30, time_limit: int = 30, ) -> None: """C'tor.""" super().__init__( - default_case = default_case + fallback_handlers = fallback_handlers ) self.initial_threshold = initial_threshold self.time_limit = time_limit @@ -561,27 +560,7 @@ def _format_timestamp(timestamp: float) -> str: time_str: str = dt.strftime(DATE_TIME_BASE_FORMAT).ljust(padding)[:padding] return Text(time_str, style="logging.time") - -def add_throttle_filter( - log: logging.Logger, - default_case: set[HandlerType] | None = None, -) -> None: - """Add the Throttle filter to the logger. - - Args: - log (logging.Logger): Logger to add the rich handler to. - default_case (set[HandlerType] | None): Default handler set used when - records do not explicitly include handler routing. 
- - Returns: - None - """ - if default_case is None: - default_case = {HandlerType.Throttle} - log.addFilter(ThrottleFilter(default_case=default_case)) - return - -def _logger_has_handler( +def logger_has_handler( log: logging.Logger, handler_type: type[logging.Handler], target_stream: io.IOBase | None = None, @@ -603,6 +582,10 @@ def _logger_has_handler( ] return any(type_matches + stream_matches) +def logger_has_filter(log: logging.Logger, filter_type: type[logging.Filter]) -> bool: + """Check if logger already has a matching filter type.""" + return any(isinstance(logger_filter, filter_type) for logger_filter in log.filters) + def check_parent_handlers( log: logging.Logger, use_parent_handlers: bool, @@ -650,24 +633,42 @@ def check_parent_handlers( logger_parent = log.parent this_is_root_logger = logger_parent.name == python_root_logger_name while not this_is_root_logger: - if _logger_has_handler(logger_parent,handler_type, target_stream): + if logger_has_handler(logger_parent,handler_type, target_stream): raise LoggerHandlerError(logger_parent.name, handler_type) logger_parent = logger_parent.parent this_is_root_logger = logger_parent.name == python_root_logger_name return +def add_throttle_filter( + log: logging.Logger, + fallback_handlers: set[HandlerType] | None = None, +) -> None: + """Add the Throttle filter to the logger. + + Args: + log (logging.Logger): Logger to add the rich handler to. + fallback_handlers (set[HandlerType] | None): Default handler set used when + records do not explicitly include handler routing. + + Returns: + None + """ + if fallback_handlers is None: + fallback_handlers = {HandlerType.Throttle} + log.addFilter(ThrottleFilter(fallback_handlers=fallback_handlers)) + return def add_rich_handler( log: logging.Logger, use_parent_handlers: bool, - default_case: set[HandlerType] | None = None, + fallback_handlers: set[HandlerType] | None = None, ) -> None: """Add a rich handler to the logger. 
Args: log (logging.Logger): Logger to add the rich handler to. use_parent_handlers (bool): Whether to check parent handlers. - default_case (set[HandlerType] | None): Default handler set used when + fallback_handlers (set[HandlerType] | None): Default handler set used when records do not explicitly include handler routing. Returns: @@ -676,8 +677,8 @@ def add_rich_handler( Raises: LoggerHandlerError: If a parent logger has a rich handler. """ - if default_case is None: - default_case = {HandlerType.Rich} + if fallback_handlers is None: + fallback_handlers = {HandlerType.Rich} check_parent_handlers(log, use_parent_handlers, FormattedRichHandler) width: int = get_width() handler: RichHandler = FormattedRichHandler(width=width) @@ -685,7 +686,7 @@ def add_rich_handler( handler.addFilter( HandleIDFilter( handler_id=HandlerType.Rich, - default_case=default_case + fallback_handlers=fallback_handlers ) ) log.addHandler(handler) @@ -695,14 +696,14 @@ def add_ers_kafka_handler( log: logging.Logger, use_parent_handlers: bool, session_name: str, - default_case: set[HandlerType] | None = None, + fallback_handlers: set[HandlerType] | None = None, topic: str = "ers_stream", address: str = "monkafka.cern.ch:30092", ) -> None: # TODO/future: topic and address are new, propagate to all relevant implementation """Add an ers protobuf handler to the root logger.""" - if default_case is None: - default_case = {HandlerType.Protobufstream} + if fallback_handlers is None: + fallback_handlers = {HandlerType.Protobufstream} check_parent_handlers(log, use_parent_handlers, ERSKafkaLogHandler) handler: ERSKafkaLogHandler = ERSKafkaLogHandler(session=session_name, kafka_address = address, @@ -712,7 +713,7 @@ def add_ers_kafka_handler( handler.addFilter( HandleIDFilter( handler_id=HandlerType.Protobufstream, - default_case=default_case + fallback_handlers=fallback_handlers ) ) log.addHandler(handler) @@ -720,14 +721,14 @@ def add_ers_kafka_handler( def add_stdout_handler( log: logging.Logger, 
use_parent_handlers: bool, - default_case: set[HandlerType] | None = None, + fallback_handlers: set[HandlerType] | None = None, ) -> None: """Add a stdout handler to the logger. Args: log (logging.Logger): Logger to add the stdout handler to. use_parent_handlers (bool): Whether to check parent handlers. - default_case (set[HandlerType] | None): Default handler set used when + fallback_handlers (set[HandlerType] | None): Default handler set used when records do not explicitly include handler routing. Returns: @@ -736,8 +737,8 @@ def add_stdout_handler( Raises: LoggerHandlerError: If a parent logger has a stdout handler. """ - if default_case is None: - default_case = {HandlerType.Stream, HandlerType.Lstdout} + if fallback_handlers is None: + fallback_handlers = {HandlerType.Stream, HandlerType.Lstdout} check_parent_handlers( log, use_parent_handlers, @@ -750,7 +751,7 @@ def add_stdout_handler( stdout_handler.addFilter( HandleIDFilter( handler_id=[HandlerType.Stream, HandlerType.Lstdout], - default_case=default_case + fallback_handlers=fallback_handlers ) ) log.addHandler(stdout_handler) @@ -759,7 +760,7 @@ def add_stdout_handler( def add_stderr_handler( log: logging.Logger, use_parent_handlers: bool, - default_case: set[HandlerType] | None = None, + fallback_handlers: set[HandlerType] | None = None, ) -> None: """Add a stderr handler to the logger. @@ -770,7 +771,7 @@ def add_stderr_handler( Args: log (logging.Logger): Logger to add the stderr handler to. use_parent_handlers (bool): Whether to check parent handlers. - default_case (set[HandlerType] | None): Default handler set used when + fallback_handlers (set[HandlerType] | None): Default handler set used when records do not explicitly include handler routing. Returns: @@ -779,8 +780,8 @@ def add_stderr_handler( Raises: LoggerHandlerError: If a parent logger has a stderr handler. 
""" - if default_case is None: - default_case = {HandlerType.Lstderr, HandlerType.Stream} + if fallback_handlers is None: + fallback_handlers = {HandlerType.Lstderr, HandlerType.Stream} check_parent_handlers( log, use_parent_handlers, @@ -792,19 +793,18 @@ def add_stderr_handler( stderr_handler.addFilter( HandleIDFilter( handler_id=[HandlerType.Stream, HandlerType.Lstderr], - default_case=default_case + fallback_handlers=fallback_handlers ) ) stderr_handler.setLevel(logging.ERROR) log.addHandler(stderr_handler) return - def add_file_handler( log: logging.Logger, use_parent_handlers: bool, path: str, - default_case: set[HandlerType] | None = None, + fallback_handlers: set[HandlerType] | None = None, ) -> None: """Add a file handler to the root logger. @@ -812,7 +812,7 @@ def add_file_handler( log (logging.Logger): Logger to add the file handler to. use_parent_handlers (bool): Whether to check parent handlers. path (str): Path to the log file. - default_case (set[HandlerType] | None): Default handler set used when + fallback_handlers (set[HandlerType] | None): Default handler set used when records do not explicitly include handler routing. Returns: @@ -821,33 +821,27 @@ def add_file_handler( Raises: LoggerHandlerError: If a parent logger has a file handler. 
""" - if default_case is None: - default_case = {HandlerType.File} + if fallback_handlers is None: + fallback_handlers = {HandlerType.File} check_parent_handlers(log, use_parent_handlers, logging.FileHandler) file_handler = logging.FileHandler(filename=path) file_handler.setFormatter(LoggingFormatter()) file_handler.addFilter( HandleIDFilter( handler_id=HandlerType.File, - default_case=default_case + fallback_handlers=fallback_handlers ) ) log.addHandler(file_handler) return - -def _logger_has_filter(log: logging.Logger, filter_type: type[logging.Filter]) -> bool: - """Check if logger already has a matching filter type.""" - return any(isinstance(logger_filter, filter_type) for logger_filter in log.filters) - - def add_handlers_from_types( log: logging.Logger, handler_types: set[HandlerType], use_parent_handlers: bool, file_name: str | None, ers_session_name: str | None, - default_case: set[HandlerType], + fallback_handlers: set[HandlerType], ) -> None: """Add handlers to a logger based on a set of HandlerType values. 
@@ -872,12 +866,12 @@ def add_handlers_from_types( # Check if current logger has stream handlers, convert to handlertypes existing_stream_handlers = { HandlerType.Lstdout - if _logger_has_handler( + if logger_has_handler( log, logging.StreamHandler, target_stream=cast(io.IOBase, sys.stdout) ) else None, HandlerType.Lstderr - if _logger_has_handler( + if logger_has_handler( log, logging.StreamHandler, target_stream=cast(io.IOBase, sys.stderr) ) else None, @@ -886,33 +880,33 @@ def add_handlers_from_types( # Check if current logger has the interested handler existing_handlers = { - HandlerType.Rich if _logger_has_handler(log, FormattedRichHandler) else None, + HandlerType.Rich if logger_has_handler(log, FormattedRichHandler) else None, HandlerType.Protobufstream - if _logger_has_handler(log, ERSKafkaLogHandler) + if logger_has_handler(log, ERSKafkaLogHandler) else None, - HandlerType.Throttle if _logger_has_filter(log, ThrottleFilter) else None, - HandlerType.File if _logger_has_filter(log, logging.FileHandler) else None, + HandlerType.Throttle if logger_has_filter(log, ThrottleFilter) else None, + HandlerType.File if logger_has_filter(log, logging.FileHandler) else None, } existing_handlers.discard(None) existing_handlers.update(existing_stream_handlers) handlers_init_map: dict[HandlerType, Callable[[], None]] = { HandlerType.Rich: lambda: add_rich_handler( - log, use_parent_handlers, default_case + log, use_parent_handlers, fallback_handlers ), HandlerType.Lstdout: lambda: add_stdout_handler( - log, use_parent_handlers, default_case + log, use_parent_handlers, fallback_handlers ), HandlerType.Lstderr: lambda: add_stderr_handler( - log, use_parent_handlers, default_case + log, use_parent_handlers, fallback_handlers ), HandlerType.File: lambda: add_file_handler( - log, use_parent_handlers, file_name, default_case + log, use_parent_handlers, file_name, fallback_handlers ), HandlerType.Protobufstream: lambda: add_ers_kafka_handler( - log, use_parent_handlers, 
ers_session_name, default_case + log, use_parent_handlers, ers_session_name, fallback_handlers ), - HandlerType.Throttle: lambda: add_throttle_filter(log, default_case), + HandlerType.Throttle: lambda: add_throttle_filter(log, fallback_handlers), } # Need to clean this bit up @@ -934,4 +928,3 @@ def add_handlers_from_types( if installer is None: continue installer() - diff --git a/src/daqpytools/logging/logger.py b/src/daqpytools/logging/logger.py index 4755de9..e298c7a 100644 --- a/src/daqpytools/logging/logger.py +++ b/src/daqpytools/logging/logger.py @@ -139,25 +139,25 @@ def get_daq_logger( logger.setLevel(log_level) logger.propagate = use_parent_handlers - default_case: set[HandlerType] = set() + fallback_handlers: set[HandlerType] = set() if rich_handler: - default_case.add(HandlerType.Rich) + fallback_handlers.add(HandlerType.Rich) if file_handler_path: - default_case.add(HandlerType.File) + fallback_handlers.add(HandlerType.File) if stream_handlers: - default_case.add(HandlerType.Stream) + fallback_handlers.add(HandlerType.Stream) if ers_kafka_handler: - default_case.add(HandlerType.Protobufstream) + fallback_handlers.add(HandlerType.Protobufstream) if throttle: - default_case.add(HandlerType.Throttle) + fallback_handlers.add(HandlerType.Throttle) add_handlers_from_types( logger, - default_case, + fallback_handlers, use_parent_handlers, file_handler_path, ers_kafka_handler, - default_case, + fallback_handlers, ) @@ -199,6 +199,6 @@ def setup_daq_ers_logger( use_parent_handlers=True, file_name=None, ers_session_name=ers_session_name, - default_case={HandlerType.Unknown}, + fallback_handlers={HandlerType.Unknown}, ) From 67123b98ce542633c5854a747477e773e8512d1e Mon Sep 17 00:00:00 2001 From: Emir Muhammad Date: Thu, 19 Feb 2026 11:09:58 +0100 Subject: [PATCH 09/12] Improve demo to show current setup isnt changed when ers --- src/daqpytools/apps/logging_demonstrator.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git 
a/src/daqpytools/apps/logging_demonstrator.py b/src/daqpytools/apps/logging_demonstrator.py index 15fd72f..4d9e6a1 100644 --- a/src/daqpytools/apps/logging_demonstrator.py +++ b/src/daqpytools/apps/logging_demonstrator.py @@ -275,11 +275,14 @@ def test_ers_handler_configuration(log_level: str) -> None: logger_name="ers_logger", log_level=log_level, stream_handlers=False, - rich_handler=False, + rich_handler=True, ) + ers_logger.info("Just rich is added") + # Sets up the logger with all the relevant handlers setup_daq_ers_logger(ers_logger, "session_temp") - + ers_logger.info("ERS configured, but should still only be rich") + ers_hc = LogHandlerConf(init_ers=True) ers_logger.info("ERS Info rich ", extra=ers_hc.ERS) ers_logger.warning("ERS error lstdout", extra=ers_hc.ERS) From 771c79815fc9683d33c7540ab93faeca79cff277 Mon Sep 17 00:00:00 2001 From: Emir Muhammad Date: Thu, 19 Feb 2026 12:27:12 +0100 Subject: [PATCH 10/12] refactor handler checking; install code --- src/daqpytools/logging/handlers.py | 151 ++++++++++++++++++----------- 1 file changed, 94 insertions(+), 57 deletions(-) diff --git a/src/daqpytools/logging/handlers.py b/src/daqpytools/logging/handlers.py index dfd3bdf..0ac2617 100644 --- a/src/daqpytools/logging/handlers.py +++ b/src/daqpytools/logging/handlers.py @@ -586,7 +586,7 @@ def logger_has_filter(log: logging.Logger, filter_type: type[logging.Filter]) -> """Check if logger already has a matching filter type.""" return any(isinstance(logger_filter, filter_type) for logger_filter in log.filters) -def check_parent_handlers( +def ancestors_have_handlers( log: logging.Logger, use_parent_handlers: bool, handler_type: type[logging.Handler], @@ -609,7 +609,7 @@ def check_parent_handlers( """ # Sanity check if not use_parent_handlers: - return + return False # Check that we are not using the true logging root logger python_root_logger_name = logging.getLogger().name @@ -634,10 +634,33 @@ def check_parent_handlers( this_is_root_logger = logger_parent.name 
== python_root_logger_name while not this_is_root_logger: if logger_has_handler(logger_parent,handler_type, target_stream): - raise LoggerHandlerError(logger_parent.name, handler_type) + return True logger_parent = logger_parent.parent this_is_root_logger = logger_parent.name == python_root_logger_name - return + return False + + +def check_parent_handlers( + log: logging.Logger, + use_parent_handlers: bool, + handler_type: type[logging.Handler], + target_stream: io.IOBase | None = None, +) -> None: + """Raise when a matching handler already exists on an ancestor logger.""" + if ancestors_have_handlers(log, use_parent_handlers, handler_type, target_stream): + raise LoggerHandlerError(log.name, handler_type) + + +def logger_or_ancestors_have_handler( + log: logging.Logger, + use_parent_handlers: bool, + handler_type: type[logging.Handler], + target_stream: io.IOBase | None = None, +) -> bool: + """Check if logger or (optionally) its ancestors have a matching handler.""" + return logger_has_handler( + log, handler_type, target_stream + ) or ancestors_have_handlers(log, use_parent_handlers, handler_type, target_stream) def add_throttle_filter( log: logging.Logger, @@ -863,68 +886,82 @@ def add_handlers_from_types( if HandlerType.Stream in effective_handler_types: effective_handler_types.update({HandlerType.Lstdout, HandlerType.Lstderr}) - # Check if current logger has stream handlers, convert to handlertypes - existing_stream_handlers = { - HandlerType.Lstdout - if logger_has_handler( - log, logging.StreamHandler, target_stream=cast(io.IOBase, sys.stdout) - ) - else None, - HandlerType.Lstderr - if logger_has_handler( - log, logging.StreamHandler, target_stream=cast(io.IOBase, sys.stderr) - ) - else None, - } - existing_stream_handlers.discard(None) - - # Check if current logger has the interested handler - existing_handlers = { - HandlerType.Rich if logger_has_handler(log, FormattedRichHandler) else None, - HandlerType.Protobufstream - if logger_has_handler(log, 
ERSKafkaLogHandler) - else None, - HandlerType.Throttle if logger_has_filter(log, ThrottleFilter) else None, - HandlerType.File if logger_has_filter(log, logging.FileHandler) else None, - } - existing_handlers.discard(None) - existing_handlers.update(existing_stream_handlers) - - handlers_init_map: dict[HandlerType, Callable[[], None]] = { - HandlerType.Rich: lambda: add_rich_handler( - log, use_parent_handlers, fallback_handlers + # Generate handler configurations based on arguments for auto install + handler_configs: dict[ + HandlerType, + tuple[ + type[logging.Handler] | None, # Handler as seen by Python's Logger + io.IOBase | None, # Used for streamhandling + type[logging.Filter] | None, # For filters attached to loggers + Callable[[], None], # Installer code + ], + ] = { + HandlerType.Rich: ( + FormattedRichHandler, + None, + None, + lambda: add_rich_handler(log, use_parent_handlers, fallback_handlers), ), - HandlerType.Lstdout: lambda: add_stdout_handler( - log, use_parent_handlers, fallback_handlers + HandlerType.Lstdout: ( + logging.StreamHandler, + cast(io.IOBase, sys.stdout), + None, + lambda: add_stdout_handler(log, use_parent_handlers, fallback_handlers), ), - HandlerType.Lstderr: lambda: add_stderr_handler( - log, use_parent_handlers, fallback_handlers + HandlerType.Lstderr: ( + logging.StreamHandler, + cast(io.IOBase, sys.stderr), + None, + lambda: add_stderr_handler(log, use_parent_handlers, fallback_handlers), ), - HandlerType.File: lambda: add_file_handler( - log, use_parent_handlers, file_name, fallback_handlers + HandlerType.Protobufstream: ( + ERSKafkaLogHandler, + None, + None, + lambda: add_ers_kafka_handler( + log, use_parent_handlers, ers_session_name, fallback_handlers + ), ), - HandlerType.Protobufstream: lambda: add_ers_kafka_handler( - log, use_parent_handlers, ers_session_name, fallback_handlers + HandlerType.Throttle: ( + None, + None, + ThrottleFilter, + lambda: add_throttle_filter(log, fallback_handlers), + ), + HandlerType.File: ( + 
logging.FileHandler, + None, + None, + lambda: add_file_handler( + log, use_parent_handlers, file_name, fallback_handlers + ), ), - HandlerType.Throttle: lambda: add_throttle_filter(log, fallback_handlers), } - # Need to clean this bit up - supported_handers = [ - HandlerType.Rich, - HandlerType.Lstdout, - HandlerType.Lstderr, - HandlerType.Protobufstream, - HandlerType.Throttle, - HandlerType.File, - ] - for handler_type in supported_handers: + for handler_type, ( + handler_class, + target_stream, + filter_type, + installer, + ) in handler_configs.items(): + + # Skips if it encounters an unrequested handler if handler_type not in effective_handler_types: continue - if handler_type in existing_handlers: - continue - installer = handlers_init_map.get(handler_type) - if installer is None: + + # Skips if handler/filter exists in either the logger or any of its ancestors + handler_exists = ( + handler_class is not None + and logger_or_ancestors_have_handler( + log, + use_parent_handlers, + handler_class, + target_stream=target_stream, + ) + ) or (filter_type is not None and logger_has_filter(log, filter_type)) + + if handler_exists: continue + installer() From 3d74cfe62e7bf2cccbb515dd617da41038b86efb Mon Sep 17 00:00:00 2001 From: Emir Muhammad Date: Fri, 20 Feb 2026 09:08:43 +0100 Subject: [PATCH 11/12] Remove ERSkafka by default.. 
---
 src/daqpytools/logging/handlers.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/daqpytools/logging/handlers.py b/src/daqpytools/logging/handlers.py
index 0ac2617..1cf9bb3 100644
--- a/src/daqpytools/logging/handlers.py
+++ b/src/daqpytools/logging/handlers.py
@@ -919,7 +919,8 @@ def add_handlers_from_types(
             None,
             None,
             lambda: add_ers_kafka_handler(
-                log, use_parent_handlers, ers_session_name, fallback_handlers
+                log, use_parent_handlers, ers_session_name, {HandlerType.Unknown}
+                # We don't want to transmit to ERS by default.
             ),
         ),
         HandlerType.Throttle: (

From 79232d0fca574a37b09c9844c4bd283fcd08cacb Mon Sep 17 00:00:00 2001
From: Emir Muhammad
Date: Mon, 23 Feb 2026 17:48:58 +0100
Subject: [PATCH 12/12] Fix against mock loggers

---
 src/daqpytools/logging/handlers.py | 24 +++++++++++++++++++++---
 tests/logging/test_logger.py       | 16 ++++++++++++++++
 2 files changed, 37 insertions(+), 3 deletions(-)

diff --git a/src/daqpytools/logging/handlers.py b/src/daqpytools/logging/handlers.py
index 1cf9bb3..09b1547 100644
--- a/src/daqpytools/logging/handlers.py
+++ b/src/daqpytools/logging/handlers.py
@@ -569,6 +569,10 @@ def logger_has_handler(
 
     For StreamHandler, ``target_stream`` can be used to distinguish stdout/stderr.
""" + # Catches cases when a MockLogger is used in pytest + if not isinstance(log, logging.Logger): + return False + type_matches = [ isinstance(handler, handler_type) for handler in log.handlers @@ -584,6 +588,9 @@ def logger_has_handler( def logger_has_filter(log: logging.Logger, filter_type: type[logging.Filter]) -> bool: """Check if logger already has a matching filter type.""" + if not isinstance(log, logging.Logger): + return False + return any(isinstance(logger_filter, filter_type) for logger_filter in log.filters) def ancestors_have_handlers( @@ -611,6 +618,9 @@ def ancestors_have_handlers( if not use_parent_handlers: return False + if not isinstance(log, logging.Logger): + return False + # Check that we are not using the true logging root logger python_root_logger_name = logging.getLogger().name if log.name == python_root_logger_name: @@ -631,12 +641,20 @@ def ancestors_have_handlers( raise ValueError(err_msg) logger_parent = log.parent - this_is_root_logger = logger_parent.name == python_root_logger_name - while not this_is_root_logger: + visited_logger_ids: set[int] = set() + + while isinstance(logger_parent, logging.Logger): + logger_id = id(logger_parent) + if logger_id in visited_logger_ids: + return False # Prevents infinite loop + visited_logger_ids.add(logger_id) + + if logger_parent.name == python_root_logger_name: + break + if logger_has_handler(logger_parent,handler_type, target_stream): return True logger_parent = logger_parent.parent - this_is_root_logger = logger_parent.name == python_root_logger_name return False diff --git a/tests/logging/test_logger.py b/tests/logging/test_logger.py index 684ef35..ce7db56 100644 --- a/tests/logging/test_logger.py +++ b/tests/logging/test_logger.py @@ -1,9 +1,11 @@ import logging import tempfile +from unittest.mock import MagicMock import pytest from daqpytools.logging.exceptions import LoggerSetupError +from daqpytools.logging.handlers import logger_or_ancestors_have_handler from daqpytools.logging.logger 
import get_daq_logger, setup_root_logger test_logger_name = "test_logger" @@ -209,3 +211,17 @@ def test_get_daq_logger(caplog: pytest.LogCaptureFixture): # Shutdown logging to reset any internal state logging.shutdown() + + +def test_logger_parent_walk_handles_mock_logger_cycle(): + """Ensure parent traversal does not hang on mock logger-like objects.""" + fake_logger = MagicMock() + fake_parent = MagicMock() + + fake_logger.parent = fake_parent + fake_parent.parent = fake_parent + + assert ( + logger_or_ancestors_have_handler(fake_logger, True, logging.NullHandler) + is False + )