Skip to content

Commit 8b8bc8f

Browse files
Merge pull request #181 from amd/alex_no_console_log
No console logging: --no-console-log
2 parents e561b83 + b3e8052 commit 8b8bc8f

7 files changed

Lines changed: 343 additions & 44 deletions

File tree

nodescraper/cli/cli.py

Lines changed: 51 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -161,6 +161,14 @@ def build_parser(
161161
help="Change python log level",
162162
)
163163

164+
parser.add_argument(
165+
"--no-console-log",
166+
action="store_true",
167+
help="Write logs only to nodescraper.log under the run directory; do not print to stdout. "
168+
"If no run log directory would be created (e.g. --log-path None), uses ./scraper_logs_<host>_<timestamp>/ "
169+
"like the default layout.",
170+
)
171+
164172
parser.add_argument(
165173
"--gen-reference-config",
166174
dest="reference_config",
@@ -316,42 +324,55 @@ def build_parser(
316324
parser_builder = DynamicParserBuilder(plugin_subparser, plugin_class)
317325
model_type_map = parser_builder.build_plugin_parser()
318326
except Exception as e:
319-
print(f"Exception building arg parsers for {plugin_name}: {str(e)}") # noqa: T201
327+
logging.getLogger(DEFAULT_LOGGER).error(
328+
"Exception building arg parsers for %s: %s", plugin_name, e, exc_info=True
329+
)
320330
continue
321331
plugin_subparser_map[plugin_name] = (plugin_subparser, model_type_map)
322332

323333
return parser, plugin_subparser_map
324334

325335

326-
def setup_logger(log_level: str = "INFO", log_path: Optional[str] = None) -> logging.Logger:
336+
def setup_logger(
337+
log_level: str = "INFO",
338+
log_path: Optional[str] = None,
339+
*,
340+
console: bool = True,
341+
) -> logging.Logger:
327342
"""set up root logger when using the CLI
328343
329344
Args:
330345
log_level (str): log level to use
331346
log_path (Optional[str]): optional path to filesystem log location
347+
console (bool): if False, omit the stdout StreamHandler (file-only when log_path is set)
332348
333349
Returns:
334350
logging.Logger: logger instance
335351
"""
336-
log_level = getattr(logging, log_level, "INFO")
352+
log_level_no = getattr(logging, log_level, logging.INFO)
337353

338-
handlers = [logging.StreamHandler(stream=sys.stdout)]
354+
handlers: list[logging.Handler] = []
355+
if console:
356+
handlers.append(logging.StreamHandler(stream=sys.stdout))
339357

340358
if log_path:
341359
log_file_name = os.path.join(log_path, "nodescraper.log")
342360
handlers.append(
343361
logging.FileHandler(filename=log_file_name, mode="wt", encoding="utf-8"),
344362
)
345363

364+
if not handlers:
365+
handlers.append(logging.NullHandler())
366+
346367
logging.basicConfig(
347368
force=True,
348-
level=log_level,
369+
level=log_level_no,
349370
format="%(asctime)25s %(levelname)10s %(name)25s | %(message)s",
350371
datefmt="%Y-%m-%d %H:%M:%S %Z",
351372
handlers=handlers,
352373
encoding="utf-8",
353374
)
354-
logging.root.setLevel(logging.INFO)
375+
logging.root.setLevel(log_level_no)
355376
logging.getLogger("paramiko").setLevel(logging.ERROR)
356377

357378
logger = logging.getLogger(DEFAULT_LOGGER)
@@ -391,11 +412,7 @@ def main(arg_input: Optional[list[str]] = None):
391412
sname = system_info.name.lower().replace("-", "_").replace(".", "_")
392413
timestamp = datetime.datetime.now().strftime("%Y_%m_%d-%I_%M_%S_%p")
393414

394-
if parsed_args.log_path and parsed_args.subcmd not in [
395-
"gen-plugin-config",
396-
"describe",
397-
"compare-runs",
398-
]:
415+
if parsed_args.log_path:
399416
log_path = os.path.join(
400417
parsed_args.log_path,
401418
f"scraper_logs_{sname}_{timestamp}",
@@ -404,7 +421,16 @@ def main(arg_input: Optional[list[str]] = None):
404421
else:
405422
log_path = None
406423

407-
logger = setup_logger(parsed_args.log_level, log_path)
424+
if parsed_args.no_console_log and not log_path:
425+
base_dir = parsed_args.log_path if parsed_args.log_path else "."
426+
log_path = os.path.join(base_dir, f"scraper_logs_{sname}_{timestamp}")
427+
os.makedirs(log_path, exist_ok=True)
428+
429+
logger = setup_logger(
430+
parsed_args.log_level,
431+
log_path,
432+
console=not parsed_args.no_console_log,
433+
)
408434
if log_path:
409435
logger.info("Log path: %s", log_path)
410436

@@ -416,7 +442,12 @@ def main(arg_input: Optional[list[str]] = None):
416442
)
417443

418444
if parsed_args.subcmd == "summary":
419-
generate_summary(parsed_args.search_path, parsed_args.output_path, logger)
445+
generate_summary(
446+
parsed_args.search_path,
447+
parsed_args.output_path,
448+
logger,
449+
artifact_dir=log_path,
450+
)
420451
sys.exit(0)
421452

422453
if parsed_args.subcmd == "describe":
@@ -431,6 +462,7 @@ def main(arg_input: Optional[list[str]] = None):
431462
skip_plugins=getattr(parsed_args, "skip_plugins", None) or [],
432463
include_plugins=getattr(parsed_args, "include_plugins", None),
433464
truncate_message=not getattr(parsed_args, "dont_truncate", False),
465+
artifact_dir=log_path,
434466
)
435467
sys.exit(0)
436468

@@ -463,7 +495,7 @@ def main(arg_input: Optional[list[str]] = None):
463495
"Could not read OEMDiagnosticDataType@Redfish.AllowableValues from LogService"
464496
)
465497
sys.exit(1)
466-
print(json.dumps(allowable, indent=2)) # noqa: T201
498+
logger.info("%s", json.dumps(allowable, indent=2))
467499
finally:
468500
conn.close()
469501
sys.exit(0)
@@ -474,10 +506,8 @@ def main(arg_input: Optional[list[str]] = None):
474506
ref_config = generate_reference_config_from_logs(
475507
parsed_args.reference_config_from_logs, plugin_reg, logger
476508
)
477-
output_path = os.getcwd()
478-
if parsed_args.output_path:
479-
output_path = parsed_args.output_path
480-
path = os.path.join(output_path, "reference_config.json")
509+
out_dir = log_path if log_path else parsed_args.output_path
510+
path = os.path.join(out_dir, "reference_config.json")
481511
try:
482512
with open(path, "w") as f:
483513
json.dump(
@@ -490,7 +520,9 @@ def main(arg_input: Optional[list[str]] = None):
490520
logger.error(exp)
491521
sys.exit(0)
492522

493-
parse_gen_plugin_config(parsed_args, plugin_reg, config_reg, logger)
523+
parse_gen_plugin_config(
524+
parsed_args, plugin_reg, config_reg, logger, artifact_dir=log_path
525+
)
494526

495527
parsed_plugin_args = {}
496528
for plugin, plugin_args in plugin_arg_map.items():

nodescraper/cli/compare_runs.py

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@
2525
###############################################################################
2626
import json
2727
import logging
28+
import os
2829
import re
2930
import sys
3031
from pathlib import Path
@@ -359,6 +360,7 @@ def run_compare_runs(
359360
include_plugins: Optional[Sequence[str]] = None,
360361
output_path: Optional[str] = None,
361362
truncate_message: bool = True,
363+
artifact_dir: Optional[str] = None,
362364
) -> None:
363365
"""Compare datamodels from two run log directories and log results.
364366
@@ -369,8 +371,10 @@ def run_compare_runs(
369371
logger: Logger for output.
370372
skip_plugins: Optional list of plugin names to exclude from comparison.
371373
include_plugins: Optional list of plugin names to include; if set, only these are compared.
372-
output_path: Optional path for full diff report; default is <path1>_<path2>_diff.txt.
374+
output_path: Optional path for full diff report; default is <path1>_<path2>_diff.txt
375+
in the current directory, or under artifact_dir when set.
373376
truncate_message: If True, truncate message text and show only first 3 errors; if False, show full text and all.
377+
artifact_dir: When set and output_path is not, write the diff file inside this directory (e.g. CLI run log dir).
374378
"""
375379
p1 = Path(path1)
376380
p2 = Path(path2)
@@ -482,11 +486,11 @@ def run_compare_runs(
482486

483487
out_file = output_path
484488
if not out_file:
485-
out_file = f"{Path(path1).name}_{Path(path2).name}_diff.txt"
489+
basename = f"{Path(path1).name}_{Path(path2).name}_diff.txt"
490+
out_file = os.path.join(artifact_dir, basename) if artifact_dir else basename
486491
full_report = _build_full_diff_report(path1, path2, data1, data2, all_plugins)
487492
Path(out_file).write_text(full_report, encoding="utf-8")
488493
logger.info("Full diff report written to: %s", out_file)
489494

490495
table_summary = TableSummary(logger=logger)
491496
table_summary.collate_results(plugin_results=plugin_results, connection_results=[])
492-
print(f"Diff file written to {out_file}") # noqa: T201

nodescraper/cli/helper.py

Lines changed: 43 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,7 @@
3131
import os
3232
import sys
3333
from pathlib import Path
34-
from typing import Optional, Tuple
34+
from typing import Optional, Sequence, Tuple
3535

3636
from pydantic import BaseModel
3737

@@ -187,6 +187,13 @@ def build_config(
187187
return config
188188

189189

190+
def log_cli_text_block(logger: logging.Logger, lines: Sequence[str]) -> None:
191+
"""Emit user-facing multi-line text through logging (respects handlers / --no-console-log)."""
192+
text = "\n".join(lines).rstrip("\n")
193+
if text:
194+
logger.info("%s", text)
195+
196+
190197
def parse_describe(
191198
parsed_args: argparse.Namespace,
192199
plugin_reg: PluginRegistry,
@@ -202,35 +209,44 @@ def parse_describe(
202209
logger (logging.Logger): logger instance
203210
"""
204211
if not parsed_args.name:
212+
out: list[str] = []
205213
if parsed_args.type == "config":
206-
print("Available built-in configs:") # noqa: T201
214+
out.append("Available built-in configs:")
207215
for name in config_reg.configs:
208-
print(f" {name}") # noqa: T201
216+
out.append(f" {name}")
209217
elif parsed_args.type == "plugin":
210-
print("Available plugins:") # noqa: T201
218+
out.append("Available plugins:")
211219
for name in plugin_reg.plugins:
212-
print(f" {name}") # noqa: T201
213-
print(f"\nUsage: describe {parsed_args.type} <name>") # noqa: T201
220+
out.append(f" {name}")
221+
out.append("")
222+
out.append(f"Usage: describe {parsed_args.type} <name>")
223+
log_cli_text_block(logger, out)
214224
sys.exit(0)
215225

216226
if parsed_args.type == "config":
217227
if parsed_args.name not in config_reg.configs:
218228
logger.error("No config found for name: %s", parsed_args.name)
219229
sys.exit(1)
220230
config_model = config_reg.configs[parsed_args.name]
221-
print(f"Config Name: {parsed_args.name}") # noqa: T201
222-
print(f"Description: {getattr(config_model, 'desc', '')}") # noqa: T201
223-
print("Plugins:") # noqa: T201
231+
out = [
232+
f"Config Name: {parsed_args.name}",
233+
f"Description: {getattr(config_model, 'desc', '')}",
234+
"Plugins:",
235+
]
224236
for plugin in getattr(config_model, "plugins", []):
225-
print(f"\t{plugin}") # noqa: T201
237+
out.append(f"\t{plugin}")
238+
log_cli_text_block(logger, out)
226239

227240
elif parsed_args.type == "plugin":
228241
if parsed_args.name not in plugin_reg.plugins:
229242
logger.error("No plugin found for name: %s", parsed_args.name)
230243
sys.exit(1)
231244
plugin_class = plugin_reg.plugins[parsed_args.name]
232-
print(f"Plugin Name: {parsed_args.name}") # noqa: T201
233-
print(f"Description: {getattr(plugin_class, '__doc__', '')}") # noqa: T201
245+
out = [
246+
f"Plugin Name: {parsed_args.name}",
247+
f"Description: {getattr(plugin_class, '__doc__', '')}",
248+
]
249+
log_cli_text_block(logger, out)
234250

235251
sys.exit(0)
236252

@@ -240,6 +256,7 @@ def parse_gen_plugin_config(
240256
plugin_reg: PluginRegistry,
241257
config_reg: ConfigRegistry,
242258
logger: logging.Logger,
259+
artifact_dir: Optional[str] = None,
243260
):
244261
"""parse 'gen_plugin_config' cmd line argument
245262
@@ -248,6 +265,7 @@ def parse_gen_plugin_config(
248265
plugin_reg (PluginRegistry): plugin registry instance
249266
config_reg (ConfigRegistry): config registry instance
250267
logger (logging.Logger): logger instance
268+
artifact_dir (Optional[str]): if set, write the config under this directory (CLI run log dir)
251269
"""
252270
try:
253271
config = build_config(
@@ -256,7 +274,8 @@ def parse_gen_plugin_config(
256274

257275
config.name = parsed_args.config_name.split(".")[0]
258276
config.desc = "Auto generated config"
259-
output_path = os.path.join(parsed_args.output_path, parsed_args.config_name)
277+
out_dir = artifact_dir if artifact_dir else parsed_args.output_path
278+
output_path = os.path.join(out_dir, parsed_args.config_name)
260279
with open(output_path, "w", encoding="utf-8") as out_file:
261280
out_file.write(config.model_dump_json(indent=2))
262281

@@ -576,13 +595,19 @@ def dump_to_csv(all_rows: list, filename: str, fieldnames: list[str], logger: lo
576595
logger.info("Data written to csv file: %s", filename)
577596

578597

579-
def generate_summary(search_path: str, output_path: Optional[str], logger: logging.Logger):
598+
def generate_summary(
599+
search_path: str,
600+
output_path: Optional[str],
601+
logger: logging.Logger,
602+
artifact_dir: Optional[str] = None,
603+
):
580604
"""Concatenate csv files into 1 summary csv file
581605
582606
Args:
583607
search_path (str): Path for previous runs
584-
output_path (Optional[str]): Path for new summary csv file
608+
output_path (Optional[str]): Directory for new summary.csv (ignored when artifact_dir is set)
585609
logger (logging.Logger): instance of logger
610+
artifact_dir (Optional[str]): if set, write summary.csv under this directory (CLI run log dir)
586611
"""
587612

588613
fieldnames = ["nodename", "plugin", "status", "timestamp", "message"]
@@ -606,8 +631,6 @@ def generate_summary(search_path: str, output_path: Optional[str], logger: loggi
606631
logger.error("No data rows found in matched CSV files.")
607632
return
608633

609-
if not output_path:
610-
output_path = os.getcwd()
611-
612-
output_path = os.path.join(output_path, "summary.csv")
613-
dump_to_csv(all_rows, output_path, fieldnames, logger)
634+
base_dir = artifact_dir if artifact_dir else (output_path or os.getcwd())
635+
out_file = os.path.join(base_dir, "summary.csv")
636+
dump_to_csv(all_rows, out_file, fieldnames, logger)

nodescraper/models/taskresult.py

Lines changed: 15 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -209,6 +209,18 @@ def log_result(self, log_path: str) -> None:
209209
with open(event_log, "w", encoding="utf-8") as log_file:
210210
json.dump(all_events, log_file, indent=2)
211211

212+
@staticmethod
213+
def _event_occurrence_count(event: Event) -> int:
214+
"""Occurrences represented by one event (RegexAnalyzer groups repeats in data['count'])."""
215+
raw = event.data.get("count")
216+
if raw is None:
217+
return 1
218+
try:
219+
n = int(raw)
220+
except (TypeError, ValueError):
221+
return 1
222+
return max(1, n)
223+
212224
def _get_event_summary(self) -> str:
213225
"""Get summary string for events
214226
@@ -219,12 +231,13 @@ def _get_event_summary(self) -> str:
219231
warning_msg_counts: dict[str, int] = {}
220232

221233
for event in self.events:
234+
n = self._event_occurrence_count(event)
222235
if event.priority == EventPriority.WARNING:
223236
warning_msg_counts[event.description] = (
224-
warning_msg_counts.get(event.description, 0) + 1
237+
warning_msg_counts.get(event.description, 0) + n
225238
)
226239
elif event.priority >= EventPriority.ERROR:
227-
error_msg_counts[event.description] = error_msg_counts.get(event.description, 0) + 1
240+
error_msg_counts[event.description] = error_msg_counts.get(event.description, 0) + n
228241

229242
summary_parts = []
230243

0 commit comments

Comments (0)