oscura-0.0.1-py3-none-any.whl → oscura-0.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- oscura/__init__.py +813 -8
- oscura/__main__.py +392 -0
- oscura/analyzers/__init__.py +37 -0
- oscura/analyzers/digital/__init__.py +177 -0
- oscura/analyzers/digital/bus.py +691 -0
- oscura/analyzers/digital/clock.py +805 -0
- oscura/analyzers/digital/correlation.py +720 -0
- oscura/analyzers/digital/edges.py +632 -0
- oscura/analyzers/digital/extraction.py +413 -0
- oscura/analyzers/digital/quality.py +878 -0
- oscura/analyzers/digital/signal_quality.py +877 -0
- oscura/analyzers/digital/thresholds.py +708 -0
- oscura/analyzers/digital/timing.py +1104 -0
- oscura/analyzers/eye/__init__.py +46 -0
- oscura/analyzers/eye/diagram.py +434 -0
- oscura/analyzers/eye/metrics.py +555 -0
- oscura/analyzers/jitter/__init__.py +83 -0
- oscura/analyzers/jitter/ber.py +333 -0
- oscura/analyzers/jitter/decomposition.py +759 -0
- oscura/analyzers/jitter/measurements.py +413 -0
- oscura/analyzers/jitter/spectrum.py +220 -0
- oscura/analyzers/measurements.py +40 -0
- oscura/analyzers/packet/__init__.py +171 -0
- oscura/analyzers/packet/daq.py +1077 -0
- oscura/analyzers/packet/metrics.py +437 -0
- oscura/analyzers/packet/parser.py +327 -0
- oscura/analyzers/packet/payload.py +2156 -0
- oscura/analyzers/packet/payload_analysis.py +1312 -0
- oscura/analyzers/packet/payload_extraction.py +236 -0
- oscura/analyzers/packet/payload_patterns.py +670 -0
- oscura/analyzers/packet/stream.py +359 -0
- oscura/analyzers/patterns/__init__.py +266 -0
- oscura/analyzers/patterns/clustering.py +1036 -0
- oscura/analyzers/patterns/discovery.py +539 -0
- oscura/analyzers/patterns/learning.py +797 -0
- oscura/analyzers/patterns/matching.py +1091 -0
- oscura/analyzers/patterns/periodic.py +650 -0
- oscura/analyzers/patterns/sequences.py +767 -0
- oscura/analyzers/power/__init__.py +116 -0
- oscura/analyzers/power/ac_power.py +391 -0
- oscura/analyzers/power/basic.py +383 -0
- oscura/analyzers/power/conduction.py +314 -0
- oscura/analyzers/power/efficiency.py +297 -0
- oscura/analyzers/power/ripple.py +356 -0
- oscura/analyzers/power/soa.py +372 -0
- oscura/analyzers/power/switching.py +479 -0
- oscura/analyzers/protocol/__init__.py +150 -0
- oscura/analyzers/protocols/__init__.py +150 -0
- oscura/analyzers/protocols/base.py +500 -0
- oscura/analyzers/protocols/can.py +620 -0
- oscura/analyzers/protocols/can_fd.py +448 -0
- oscura/analyzers/protocols/flexray.py +405 -0
- oscura/analyzers/protocols/hdlc.py +399 -0
- oscura/analyzers/protocols/i2c.py +368 -0
- oscura/analyzers/protocols/i2s.py +296 -0
- oscura/analyzers/protocols/jtag.py +393 -0
- oscura/analyzers/protocols/lin.py +445 -0
- oscura/analyzers/protocols/manchester.py +333 -0
- oscura/analyzers/protocols/onewire.py +501 -0
- oscura/analyzers/protocols/spi.py +334 -0
- oscura/analyzers/protocols/swd.py +325 -0
- oscura/analyzers/protocols/uart.py +393 -0
- oscura/analyzers/protocols/usb.py +495 -0
- oscura/analyzers/signal_integrity/__init__.py +63 -0
- oscura/analyzers/signal_integrity/embedding.py +294 -0
- oscura/analyzers/signal_integrity/equalization.py +370 -0
- oscura/analyzers/signal_integrity/sparams.py +484 -0
- oscura/analyzers/spectral/__init__.py +53 -0
- oscura/analyzers/spectral/chunked.py +273 -0
- oscura/analyzers/spectral/chunked_fft.py +571 -0
- oscura/analyzers/spectral/chunked_wavelet.py +391 -0
- oscura/analyzers/spectral/fft.py +92 -0
- oscura/analyzers/statistical/__init__.py +250 -0
- oscura/analyzers/statistical/checksum.py +923 -0
- oscura/analyzers/statistical/chunked_corr.py +228 -0
- oscura/analyzers/statistical/classification.py +778 -0
- oscura/analyzers/statistical/entropy.py +1113 -0
- oscura/analyzers/statistical/ngrams.py +614 -0
- oscura/analyzers/statistics/__init__.py +119 -0
- oscura/analyzers/statistics/advanced.py +885 -0
- oscura/analyzers/statistics/basic.py +263 -0
- oscura/analyzers/statistics/correlation.py +630 -0
- oscura/analyzers/statistics/distribution.py +298 -0
- oscura/analyzers/statistics/outliers.py +463 -0
- oscura/analyzers/statistics/streaming.py +93 -0
- oscura/analyzers/statistics/trend.py +520 -0
- oscura/analyzers/validation.py +598 -0
- oscura/analyzers/waveform/__init__.py +36 -0
- oscura/analyzers/waveform/measurements.py +943 -0
- oscura/analyzers/waveform/measurements_with_uncertainty.py +371 -0
- oscura/analyzers/waveform/spectral.py +1689 -0
- oscura/analyzers/waveform/wavelets.py +298 -0
- oscura/api/__init__.py +62 -0
- oscura/api/dsl.py +538 -0
- oscura/api/fluent.py +571 -0
- oscura/api/operators.py +498 -0
- oscura/api/optimization.py +392 -0
- oscura/api/profiling.py +396 -0
- oscura/automotive/__init__.py +73 -0
- oscura/automotive/can/__init__.py +52 -0
- oscura/automotive/can/analysis.py +356 -0
- oscura/automotive/can/checksum.py +250 -0
- oscura/automotive/can/correlation.py +212 -0
- oscura/automotive/can/discovery.py +355 -0
- oscura/automotive/can/message_wrapper.py +375 -0
- oscura/automotive/can/models.py +385 -0
- oscura/automotive/can/patterns.py +381 -0
- oscura/automotive/can/session.py +452 -0
- oscura/automotive/can/state_machine.py +300 -0
- oscura/automotive/can/stimulus_response.py +461 -0
- oscura/automotive/dbc/__init__.py +15 -0
- oscura/automotive/dbc/generator.py +156 -0
- oscura/automotive/dbc/parser.py +146 -0
- oscura/automotive/dtc/__init__.py +30 -0
- oscura/automotive/dtc/database.py +3036 -0
- oscura/automotive/j1939/__init__.py +14 -0
- oscura/automotive/j1939/decoder.py +745 -0
- oscura/automotive/loaders/__init__.py +35 -0
- oscura/automotive/loaders/asc.py +98 -0
- oscura/automotive/loaders/blf.py +77 -0
- oscura/automotive/loaders/csv_can.py +136 -0
- oscura/automotive/loaders/dispatcher.py +136 -0
- oscura/automotive/loaders/mdf.py +331 -0
- oscura/automotive/loaders/pcap.py +132 -0
- oscura/automotive/obd/__init__.py +14 -0
- oscura/automotive/obd/decoder.py +707 -0
- oscura/automotive/uds/__init__.py +48 -0
- oscura/automotive/uds/decoder.py +265 -0
- oscura/automotive/uds/models.py +64 -0
- oscura/automotive/visualization.py +369 -0
- oscura/batch/__init__.py +55 -0
- oscura/batch/advanced.py +627 -0
- oscura/batch/aggregate.py +300 -0
- oscura/batch/analyze.py +139 -0
- oscura/batch/logging.py +487 -0
- oscura/batch/metrics.py +556 -0
- oscura/builders/__init__.py +41 -0
- oscura/builders/signal_builder.py +1131 -0
- oscura/cli/__init__.py +14 -0
- oscura/cli/batch.py +339 -0
- oscura/cli/characterize.py +273 -0
- oscura/cli/compare.py +775 -0
- oscura/cli/decode.py +551 -0
- oscura/cli/main.py +247 -0
- oscura/cli/shell.py +350 -0
- oscura/comparison/__init__.py +66 -0
- oscura/comparison/compare.py +397 -0
- oscura/comparison/golden.py +487 -0
- oscura/comparison/limits.py +391 -0
- oscura/comparison/mask.py +434 -0
- oscura/comparison/trace_diff.py +30 -0
- oscura/comparison/visualization.py +481 -0
- oscura/compliance/__init__.py +70 -0
- oscura/compliance/advanced.py +756 -0
- oscura/compliance/masks.py +363 -0
- oscura/compliance/reporting.py +483 -0
- oscura/compliance/testing.py +298 -0
- oscura/component/__init__.py +38 -0
- oscura/component/impedance.py +365 -0
- oscura/component/reactive.py +598 -0
- oscura/component/transmission_line.py +312 -0
- oscura/config/__init__.py +191 -0
- oscura/config/defaults.py +254 -0
- oscura/config/loader.py +348 -0
- oscura/config/memory.py +271 -0
- oscura/config/migration.py +458 -0
- oscura/config/pipeline.py +1077 -0
- oscura/config/preferences.py +530 -0
- oscura/config/protocol.py +875 -0
- oscura/config/schema.py +713 -0
- oscura/config/settings.py +420 -0
- oscura/config/thresholds.py +599 -0
- oscura/convenience.py +457 -0
- oscura/core/__init__.py +299 -0
- oscura/core/audit.py +457 -0
- oscura/core/backend_selector.py +405 -0
- oscura/core/cache.py +590 -0
- oscura/core/cancellation.py +439 -0
- oscura/core/confidence.py +225 -0
- oscura/core/config.py +506 -0
- oscura/core/correlation.py +216 -0
- oscura/core/cross_domain.py +422 -0
- oscura/core/debug.py +301 -0
- oscura/core/edge_cases.py +541 -0
- oscura/core/exceptions.py +535 -0
- oscura/core/gpu_backend.py +523 -0
- oscura/core/lazy.py +832 -0
- oscura/core/log_query.py +540 -0
- oscura/core/logging.py +931 -0
- oscura/core/logging_advanced.py +952 -0
- oscura/core/memoize.py +171 -0
- oscura/core/memory_check.py +274 -0
- oscura/core/memory_guard.py +290 -0
- oscura/core/memory_limits.py +336 -0
- oscura/core/memory_monitor.py +453 -0
- oscura/core/memory_progress.py +465 -0
- oscura/core/memory_warnings.py +315 -0
- oscura/core/numba_backend.py +362 -0
- oscura/core/performance.py +352 -0
- oscura/core/progress.py +524 -0
- oscura/core/provenance.py +358 -0
- oscura/core/results.py +331 -0
- oscura/core/types.py +504 -0
- oscura/core/uncertainty.py +383 -0
- oscura/discovery/__init__.py +52 -0
- oscura/discovery/anomaly_detector.py +672 -0
- oscura/discovery/auto_decoder.py +415 -0
- oscura/discovery/comparison.py +497 -0
- oscura/discovery/quality_validator.py +528 -0
- oscura/discovery/signal_detector.py +769 -0
- oscura/dsl/__init__.py +73 -0
- oscura/dsl/commands.py +246 -0
- oscura/dsl/interpreter.py +455 -0
- oscura/dsl/parser.py +689 -0
- oscura/dsl/repl.py +172 -0
- oscura/exceptions.py +59 -0
- oscura/exploratory/__init__.py +111 -0
- oscura/exploratory/error_recovery.py +642 -0
- oscura/exploratory/fuzzy.py +513 -0
- oscura/exploratory/fuzzy_advanced.py +786 -0
- oscura/exploratory/legacy.py +831 -0
- oscura/exploratory/parse.py +358 -0
- oscura/exploratory/recovery.py +275 -0
- oscura/exploratory/sync.py +382 -0
- oscura/exploratory/unknown.py +707 -0
- oscura/export/__init__.py +25 -0
- oscura/export/wireshark/README.md +265 -0
- oscura/export/wireshark/__init__.py +47 -0
- oscura/export/wireshark/generator.py +312 -0
- oscura/export/wireshark/lua_builder.py +159 -0
- oscura/export/wireshark/templates/dissector.lua.j2 +92 -0
- oscura/export/wireshark/type_mapping.py +165 -0
- oscura/export/wireshark/validator.py +105 -0
- oscura/exporters/__init__.py +94 -0
- oscura/exporters/csv.py +303 -0
- oscura/exporters/exporters.py +44 -0
- oscura/exporters/hdf5.py +219 -0
- oscura/exporters/html_export.py +701 -0
- oscura/exporters/json_export.py +291 -0
- oscura/exporters/markdown_export.py +367 -0
- oscura/exporters/matlab_export.py +354 -0
- oscura/exporters/npz_export.py +219 -0
- oscura/exporters/spice_export.py +210 -0
- oscura/extensibility/__init__.py +131 -0
- oscura/extensibility/docs.py +752 -0
- oscura/extensibility/extensions.py +1125 -0
- oscura/extensibility/logging.py +259 -0
- oscura/extensibility/measurements.py +485 -0
- oscura/extensibility/plugins.py +414 -0
- oscura/extensibility/registry.py +346 -0
- oscura/extensibility/templates.py +913 -0
- oscura/extensibility/validation.py +651 -0
- oscura/filtering/__init__.py +89 -0
- oscura/filtering/base.py +563 -0
- oscura/filtering/convenience.py +564 -0
- oscura/filtering/design.py +725 -0
- oscura/filtering/filters.py +32 -0
- oscura/filtering/introspection.py +605 -0
- oscura/guidance/__init__.py +24 -0
- oscura/guidance/recommender.py +429 -0
- oscura/guidance/wizard.py +518 -0
- oscura/inference/__init__.py +251 -0
- oscura/inference/active_learning/README.md +153 -0
- oscura/inference/active_learning/__init__.py +38 -0
- oscura/inference/active_learning/lstar.py +257 -0
- oscura/inference/active_learning/observation_table.py +230 -0
- oscura/inference/active_learning/oracle.py +78 -0
- oscura/inference/active_learning/teachers/__init__.py +15 -0
- oscura/inference/active_learning/teachers/simulator.py +192 -0
- oscura/inference/adaptive_tuning.py +453 -0
- oscura/inference/alignment.py +653 -0
- oscura/inference/bayesian.py +943 -0
- oscura/inference/binary.py +1016 -0
- oscura/inference/crc_reverse.py +711 -0
- oscura/inference/logic.py +288 -0
- oscura/inference/message_format.py +1305 -0
- oscura/inference/protocol.py +417 -0
- oscura/inference/protocol_dsl.py +1084 -0
- oscura/inference/protocol_library.py +1230 -0
- oscura/inference/sequences.py +809 -0
- oscura/inference/signal_intelligence.py +1509 -0
- oscura/inference/spectral.py +215 -0
- oscura/inference/state_machine.py +634 -0
- oscura/inference/stream.py +918 -0
- oscura/integrations/__init__.py +59 -0
- oscura/integrations/llm.py +1827 -0
- oscura/jupyter/__init__.py +32 -0
- oscura/jupyter/display.py +268 -0
- oscura/jupyter/magic.py +334 -0
- oscura/loaders/__init__.py +526 -0
- oscura/loaders/binary.py +69 -0
- oscura/loaders/configurable.py +1255 -0
- oscura/loaders/csv.py +26 -0
- oscura/loaders/csv_loader.py +473 -0
- oscura/loaders/hdf5.py +9 -0
- oscura/loaders/hdf5_loader.py +510 -0
- oscura/loaders/lazy.py +370 -0
- oscura/loaders/mmap_loader.py +583 -0
- oscura/loaders/numpy_loader.py +436 -0
- oscura/loaders/pcap.py +432 -0
- oscura/loaders/preprocessing.py +368 -0
- oscura/loaders/rigol.py +287 -0
- oscura/loaders/sigrok.py +321 -0
- oscura/loaders/tdms.py +367 -0
- oscura/loaders/tektronix.py +711 -0
- oscura/loaders/validation.py +584 -0
- oscura/loaders/vcd.py +464 -0
- oscura/loaders/wav.py +233 -0
- oscura/math/__init__.py +45 -0
- oscura/math/arithmetic.py +824 -0
- oscura/math/interpolation.py +413 -0
- oscura/onboarding/__init__.py +39 -0
- oscura/onboarding/help.py +498 -0
- oscura/onboarding/tutorials.py +405 -0
- oscura/onboarding/wizard.py +466 -0
- oscura/optimization/__init__.py +19 -0
- oscura/optimization/parallel.py +440 -0
- oscura/optimization/search.py +532 -0
- oscura/pipeline/__init__.py +43 -0
- oscura/pipeline/base.py +338 -0
- oscura/pipeline/composition.py +242 -0
- oscura/pipeline/parallel.py +448 -0
- oscura/pipeline/pipeline.py +375 -0
- oscura/pipeline/reverse_engineering.py +1119 -0
- oscura/plugins/__init__.py +122 -0
- oscura/plugins/base.py +272 -0
- oscura/plugins/cli.py +497 -0
- oscura/plugins/discovery.py +411 -0
- oscura/plugins/isolation.py +418 -0
- oscura/plugins/lifecycle.py +959 -0
- oscura/plugins/manager.py +493 -0
- oscura/plugins/registry.py +421 -0
- oscura/plugins/versioning.py +372 -0
- oscura/py.typed +0 -0
- oscura/quality/__init__.py +65 -0
- oscura/quality/ensemble.py +740 -0
- oscura/quality/explainer.py +338 -0
- oscura/quality/scoring.py +616 -0
- oscura/quality/warnings.py +456 -0
- oscura/reporting/__init__.py +248 -0
- oscura/reporting/advanced.py +1234 -0
- oscura/reporting/analyze.py +448 -0
- oscura/reporting/argument_preparer.py +596 -0
- oscura/reporting/auto_report.py +507 -0
- oscura/reporting/batch.py +615 -0
- oscura/reporting/chart_selection.py +223 -0
- oscura/reporting/comparison.py +330 -0
- oscura/reporting/config.py +615 -0
- oscura/reporting/content/__init__.py +39 -0
- oscura/reporting/content/executive.py +127 -0
- oscura/reporting/content/filtering.py +191 -0
- oscura/reporting/content/minimal.py +257 -0
- oscura/reporting/content/verbosity.py +162 -0
- oscura/reporting/core.py +508 -0
- oscura/reporting/core_formats/__init__.py +17 -0
- oscura/reporting/core_formats/multi_format.py +210 -0
- oscura/reporting/engine.py +836 -0
- oscura/reporting/export.py +366 -0
- oscura/reporting/formatting/__init__.py +129 -0
- oscura/reporting/formatting/emphasis.py +81 -0
- oscura/reporting/formatting/numbers.py +403 -0
- oscura/reporting/formatting/standards.py +55 -0
- oscura/reporting/formatting.py +466 -0
- oscura/reporting/html.py +578 -0
- oscura/reporting/index.py +590 -0
- oscura/reporting/multichannel.py +296 -0
- oscura/reporting/output.py +379 -0
- oscura/reporting/pdf.py +373 -0
- oscura/reporting/plots.py +731 -0
- oscura/reporting/pptx_export.py +360 -0
- oscura/reporting/renderers/__init__.py +11 -0
- oscura/reporting/renderers/pdf.py +94 -0
- oscura/reporting/sections.py +471 -0
- oscura/reporting/standards.py +680 -0
- oscura/reporting/summary_generator.py +368 -0
- oscura/reporting/tables.py +397 -0
- oscura/reporting/template_system.py +724 -0
- oscura/reporting/templates/__init__.py +15 -0
- oscura/reporting/templates/definition.py +205 -0
- oscura/reporting/templates/index.html +649 -0
- oscura/reporting/templates/index.md +173 -0
- oscura/schemas/__init__.py +158 -0
- oscura/schemas/bus_configuration.json +322 -0
- oscura/schemas/device_mapping.json +182 -0
- oscura/schemas/packet_format.json +418 -0
- oscura/schemas/protocol_definition.json +363 -0
- oscura/search/__init__.py +16 -0
- oscura/search/anomaly.py +292 -0
- oscura/search/context.py +149 -0
- oscura/search/pattern.py +160 -0
- oscura/session/__init__.py +34 -0
- oscura/session/annotations.py +289 -0
- oscura/session/history.py +313 -0
- oscura/session/session.py +445 -0
- oscura/streaming/__init__.py +43 -0
- oscura/streaming/chunked.py +611 -0
- oscura/streaming/progressive.py +393 -0
- oscura/streaming/realtime.py +622 -0
- oscura/testing/__init__.py +54 -0
- oscura/testing/synthetic.py +808 -0
- oscura/triggering/__init__.py +68 -0
- oscura/triggering/base.py +229 -0
- oscura/triggering/edge.py +353 -0
- oscura/triggering/pattern.py +344 -0
- oscura/triggering/pulse.py +581 -0
- oscura/triggering/window.py +453 -0
- oscura/ui/__init__.py +48 -0
- oscura/ui/formatters.py +526 -0
- oscura/ui/progressive_display.py +340 -0
- oscura/utils/__init__.py +99 -0
- oscura/utils/autodetect.py +338 -0
- oscura/utils/buffer.py +389 -0
- oscura/utils/lazy.py +407 -0
- oscura/utils/lazy_imports.py +147 -0
- oscura/utils/memory.py +836 -0
- oscura/utils/memory_advanced.py +1326 -0
- oscura/utils/memory_extensions.py +465 -0
- oscura/utils/progressive.py +352 -0
- oscura/utils/windowing.py +362 -0
- oscura/visualization/__init__.py +321 -0
- oscura/visualization/accessibility.py +526 -0
- oscura/visualization/annotations.py +374 -0
- oscura/visualization/axis_scaling.py +305 -0
- oscura/visualization/colors.py +453 -0
- oscura/visualization/digital.py +337 -0
- oscura/visualization/eye.py +420 -0
- oscura/visualization/histogram.py +281 -0
- oscura/visualization/interactive.py +858 -0
- oscura/visualization/jitter.py +702 -0
- oscura/visualization/keyboard.py +394 -0
- oscura/visualization/layout.py +365 -0
- oscura/visualization/optimization.py +1028 -0
- oscura/visualization/palettes.py +446 -0
- oscura/visualization/plot.py +92 -0
- oscura/visualization/power.py +290 -0
- oscura/visualization/power_extended.py +626 -0
- oscura/visualization/presets.py +467 -0
- oscura/visualization/protocols.py +932 -0
- oscura/visualization/render.py +207 -0
- oscura/visualization/rendering.py +444 -0
- oscura/visualization/reverse_engineering.py +791 -0
- oscura/visualization/signal_integrity.py +808 -0
- oscura/visualization/specialized.py +553 -0
- oscura/visualization/spectral.py +811 -0
- oscura/visualization/styles.py +381 -0
- oscura/visualization/thumbnails.py +311 -0
- oscura/visualization/time_axis.py +351 -0
- oscura/visualization/waveform.py +367 -0
- oscura/workflow/__init__.py +13 -0
- oscura/workflow/dag.py +377 -0
- oscura/workflows/__init__.py +58 -0
- oscura/workflows/compliance.py +280 -0
- oscura/workflows/digital.py +272 -0
- oscura/workflows/multi_trace.py +502 -0
- oscura/workflows/power.py +178 -0
- oscura/workflows/protocol.py +492 -0
- oscura/workflows/reverse_engineering.py +639 -0
- oscura/workflows/signal_integrity.py +227 -0
- oscura-0.1.0.dist-info/METADATA +300 -0
- oscura-0.1.0.dist-info/RECORD +463 -0
- oscura-0.1.0.dist-info/entry_points.txt +2 -0
- {oscura-0.0.1.dist-info → oscura-0.1.0.dist-info}/licenses/LICENSE +1 -1
- oscura-0.0.1.dist-info/METADATA +0 -63
- oscura-0.0.1.dist-info/RECORD +0 -5
- {oscura-0.0.1.dist-info → oscura-0.1.0.dist-info}/WHEEL +0 -0
oscura/core/logging_advanced.py
@@ -0,0 +1,952 @@
+"""Advanced logging features for TraceKit.
+
+This module provides advanced logging capabilities including log aggregation,
+analysis, alerting, sampling, and external system integration.
+"""
+
+from __future__ import annotations
+
+import gzip
+import hashlib
+import json
+import logging
+import os
+import queue
+import re
+import threading
+import time
+from collections import Counter, deque
+from dataclasses import dataclass, field
+from datetime import datetime, timedelta
+from enum import Enum, auto
+from typing import TYPE_CHECKING, Any
+
+if TYPE_CHECKING:
+    from collections.abc import Callable
+
+logger = logging.getLogger(__name__)
+
+
+# =============================================================================
+# =============================================================================
+
+
+@dataclass
+class AggregatedLogEntry:
+    """Aggregated log entry.
+
+    Attributes:
+        key: Aggregation key
+        count: Number of occurrences
+        first_seen: First occurrence timestamp
+        last_seen: Last occurrence timestamp
+        sample_message: Sample message
+        levels: Counter of log levels
+        sources: Set of source loggers
+
+    References:
+        LOG-009: Log Aggregation
+    """
+
+    key: str
+    count: int = 0
+    first_seen: datetime = field(default_factory=datetime.now)
+    last_seen: datetime = field(default_factory=datetime.now)
+    sample_message: str = ""
+    levels: Counter = field(default_factory=Counter)  # type: ignore[type-arg]
+    sources: set[str] = field(default_factory=set)
+
+
+class LogAggregator:
+    """Aggregates log messages by pattern.
+
+    Groups similar log messages together to reduce noise and
+    identify patterns.
+
+    Example:
+        >>> aggregator = LogAggregator()
+        >>> aggregator.add(record)
+        >>> summary = aggregator.get_summary()
+
+    References:
+        LOG-009: Log Aggregation
+    """
+
+    def __init__(self, window_seconds: int = 60, min_count: int = 2):
+        """Initialize aggregator.
+
+        Args:
+            window_seconds: Aggregation window size
+            min_count: Minimum occurrences to report
+        """
+        self.window_seconds = window_seconds
+        self.min_count = min_count
+        self._entries: dict[str, AggregatedLogEntry] = {}
+        self._lock = threading.Lock()
+
+    def _normalize_message(self, message: str) -> str:
+        """Normalize message for grouping.
+
+        Replaces variable parts with placeholders.
+
+        Args:
+            message: Log message to normalize.
+
+        Returns:
+            Normalized message with placeholders.
+        """
+        # Replace numbers
+        normalized = re.sub(r"\d+", "<NUM>", message)
+        # Replace UUIDs
+        normalized = re.sub(
+            r"[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}",
+            "<UUID>",
+            normalized,
+            flags=re.IGNORECASE,
+        )
+        # Replace file paths
+        normalized = re.sub(r"[/\\][\w./\\-]+", "<PATH>", normalized)
+        return normalized
+
+    def add(self, record: logging.LogRecord) -> None:
+        """Add log record to aggregator.
+
+        Args:
+            record: Log record
+        """
+        key = self._normalize_message(record.getMessage())
+        now = datetime.now()
+
+        with self._lock:
+            if key not in self._entries:
+                self._entries[key] = AggregatedLogEntry(
+                    key=key, first_seen=now, sample_message=record.getMessage()
+                )
+
+            entry = self._entries[key]
+            entry.count += 1
+            entry.last_seen = now
+            entry.levels[record.levelname] += 1
+            entry.sources.add(record.name)
+
+    def get_summary(self) -> list[AggregatedLogEntry]:
+        """Get aggregation summary.
+
+        Returns:
+            List of aggregated entries meeting threshold
+        """
+        with self._lock:
+            return [entry for entry in self._entries.values() if entry.count >= self.min_count]
+
+    def cleanup_old(self) -> None:
+        """Remove entries outside window."""
+        cutoff = datetime.now() - timedelta(seconds=self.window_seconds)
+        with self._lock:
+            self._entries = {k: v for k, v in self._entries.items() if v.last_seen >= cutoff}
+
+
+# =============================================================================
+# =============================================================================
+
+
+@dataclass
+class LogPattern:
+    """Detected log pattern.
+
+    References:
+        LOG-010: Log Analysis and Patterns
+    """
+
+    pattern: str
+    count: int
+    severity_distribution: dict[str, int]
+    time_distribution: dict[int, int]  # Hour -> count
+    example: str
+
+
+class LogAnalyzer:
+    """Analyzes log patterns and trends.
+
+    References:
+        LOG-010: Log Analysis and Patterns
+    """
+
+    def __init__(self, max_history: int = 10000):
+        self._history: deque = deque(maxlen=max_history)  # type: ignore[type-arg]
+        self._patterns: dict[str, LogPattern] = {}
+
+    def add(self, record: logging.LogRecord) -> None:
+        """Add record to analysis history."""
+        self._history.append(
+            {
+                "message": record.getMessage(),
+                "level": record.levelname,
+                "time": datetime.now(),
+                "logger": record.name,
+            }
+        )
+
+    def analyze_patterns(self) -> list[LogPattern]:
+        """Analyze log patterns.
+
+        Returns:
+            List of detected patterns
+        """
+        pattern_counts: Counter = Counter()  # type: ignore[type-arg]
+        pattern_levels: dict[str, Counter] = {}  # type: ignore[type-arg]
+        pattern_hours: dict[str, Counter] = {}  # type: ignore[type-arg]
+        pattern_examples: dict[str, str] = {}
+
+        for entry in self._history:
+            # Normalize message
+            normalized = re.sub(r"\d+", "<N>", entry["message"])
+            pattern_counts[normalized] += 1
+
+            if normalized not in pattern_levels:
+                pattern_levels[normalized] = Counter()
+                pattern_hours[normalized] = Counter()
+                pattern_examples[normalized] = entry["message"]
+
+            pattern_levels[normalized][entry["level"]] += 1
+            pattern_hours[normalized][entry["time"].hour] += 1
+
+        return [
+            LogPattern(
+                pattern=pattern,
+                count=count,
+                severity_distribution=dict(pattern_levels.get(pattern, {})),
+                time_distribution=dict(pattern_hours.get(pattern, {})),
+                example=pattern_examples.get(pattern, ""),
+            )
+            for pattern, count in pattern_counts.most_common(20)
+        ]
+
+    def get_error_rate(self, window_minutes: int = 60) -> float:
+        """Get error rate over window.
+
+        Args:
+            window_minutes: Window size in minutes
+
+        Returns:
+            Error rate (0.0 to 1.0)
+        """
+        cutoff = datetime.now() - timedelta(minutes=window_minutes)
+        recent = [e for e in self._history if e["time"] >= cutoff]
+
+        if not recent:
+            return 0.0
+
+        errors = sum(1 for e in recent if e["level"] in ("ERROR", "CRITICAL"))
+        return errors / len(recent)
+
+    def get_trend(self) -> str:
+        """Get error trend (increasing, stable, decreasing)."""
+        if len(self._history) < 100:
+            return "insufficient_data"
+
+        # Compare first half to second half
+        mid = len(self._history) // 2
+        first_half = list(self._history)[:mid]
+        second_half = list(self._history)[mid:]
+
+        first_errors = sum(1 for e in first_half if e["level"] in ("ERROR", "CRITICAL"))
+        second_errors = sum(1 for e in second_half if e["level"] in ("ERROR", "CRITICAL"))
+
+        first_rate = first_errors / len(first_half)
+        second_rate = second_errors / len(second_half)
+
+        if second_rate > first_rate * 1.2:
+            return "increasing"
+        elif second_rate < first_rate * 0.8:
+            return "decreasing"
+        return "stable"
+
+
+# =============================================================================
+# =============================================================================
+
+
+class AlertSeverity(Enum):
+    """Alert severity levels."""
+
+    INFO = auto()
+    WARNING = auto()
+    ERROR = auto()
+    CRITICAL = auto()
+
+
+@dataclass
+class LogAlert:
+    """Log alert definition.
+
+    References:
+        LOG-012: Log Alerting
+    """
+
+    id: str
+    name: str
+    condition: Callable[[logging.LogRecord], bool]
+    severity: AlertSeverity = AlertSeverity.WARNING
+    cooldown_seconds: int = 300
+    last_triggered: datetime | None = None
+    enabled: bool = True
+
+
+@dataclass
+class TriggeredAlert:
+    """Triggered alert instance."""
+
+    alert: LogAlert
+    record: logging.LogRecord
+    timestamp: datetime
+
+
+class LogAlerter:
+    """Log alerting system.
+
+    Monitors logs and triggers alerts based on conditions.
+
+    Example:
+        >>> alerter = LogAlerter()
+        >>> alerter.add_alert("error_burst", lambda r: r.levelno >= logging.ERROR)
+        >>> alerter.on_alert(lambda a: send_notification(a))
+
+    References:
+        LOG-012: Log Alerting
+    """
+
+    def __init__(self) -> None:
+        self._alerts: dict[str, LogAlert] = {}
+        self._handlers: list[Callable[[TriggeredAlert], None]] = []
+        self._lock = threading.Lock()
+
+    def add_alert(
+        self,
+        name: str,
+        condition: Callable[[logging.LogRecord], bool],
+        severity: AlertSeverity = AlertSeverity.WARNING,
+        cooldown_seconds: int = 300,
+    ) -> str:
+        """Add alert definition.
+
+        Args:
+            name: Alert name
+            condition: Condition function
+            severity: Alert severity
+            cooldown_seconds: Minimum time between triggers
+
+        Returns:
+            Alert ID
+        """
+        import uuid
+
+        alert_id = str(uuid.uuid4())
+
+        alert = LogAlert(
+            id=alert_id,
+            name=name,
+            condition=condition,
+            severity=severity,
+            cooldown_seconds=cooldown_seconds,
+        )
+        self._alerts[alert_id] = alert
+        return alert_id
+
+    def check(self, record: logging.LogRecord) -> list[TriggeredAlert]:
+        """Check record against all alerts.
+
+        Args:
+            record: Log record to check
+
+        Returns:
+            List of triggered alerts
+        """
+        triggered = []
+        now = datetime.now()
+
+        with self._lock:
+            for alert in self._alerts.values():
+                if not alert.enabled:
+                    continue
+
+                # Check cooldown
+                if alert.last_triggered:
+                    elapsed = (now - alert.last_triggered).total_seconds()
+                    if elapsed < alert.cooldown_seconds:
+                        continue
+
+                # Check condition
+                try:
+                    if alert.condition(record):
+                        alert.last_triggered = now
+                        triggered_alert = TriggeredAlert(alert=alert, record=record, timestamp=now)
+                        triggered.append(triggered_alert)
+                        self._notify(triggered_alert)
+                except Exception as e:
+                    logger.warning(f"Alert condition check failed: {e}")
+
+        return triggered
+
+    def on_alert(self, handler: Callable[[TriggeredAlert], None]) -> None:
+        """Register alert handler."""
+        self._handlers.append(handler)
+
+    def _notify(self, alert: TriggeredAlert) -> None:
+        """Notify handlers of triggered alert."""
+        for handler in self._handlers:
+            try:
+                handler(alert)
+            except Exception as e:
+                logger.warning(f"Alert handler failed: {e}")
+
+
+# =============================================================================
+# =============================================================================
+
+
+class SamplingStrategy(Enum):
+    """Sampling strategy."""
+
+    RANDOM = auto()
+    RATE_LIMIT = auto()
+    ADAPTIVE = auto()
+
+
+class LogSampler:
+    """Samples log messages for high-volume scenarios.
+
+    References:
+        LOG-015: Log Sampling for High-Volume
+    """
+
+    def __init__(
+        self,
+        strategy: SamplingStrategy = SamplingStrategy.RATE_LIMIT,
+        rate: float = 0.1,  # 10% for random
+        max_per_second: int = 100,  # for rate limit
+    ):
+        self.strategy = strategy
+        self.rate = rate
+        self.max_per_second = max_per_second
+        self._count_this_second = 0
+        self._last_second = 0
+        self._lock = threading.Lock()
+
+    def should_log(self, record: logging.LogRecord) -> bool:
+        """Determine if record should be logged.
+
+        Args:
+            record: Log record
+
+        Returns:
+            True if should log
+        """
+        # Always log errors and above
+        if record.levelno >= logging.ERROR:
+            return True
+
+        if self.strategy == SamplingStrategy.RANDOM:
+            import random
+
+            return random.random() < self.rate
+
+        elif self.strategy == SamplingStrategy.RATE_LIMIT:
+            with self._lock:
+                current_second = int(time.time())
+                if current_second != self._last_second:
+                    self._last_second = current_second
+                    self._count_this_second = 0
+
+                if self._count_this_second < self.max_per_second:
+                    self._count_this_second += 1
+                    return True
+                return False
+
+        elif self.strategy == SamplingStrategy.ADAPTIVE:
+            # Reduce sampling as volume increases
+            with self._lock:
+                current_second = int(time.time())
+                if current_second != self._last_second:
+                    volume = self._count_this_second
+                    self._last_second = current_second
+                    self._count_this_second = 0
+
+                    # Adjust rate based on volume
+                    if volume > 1000:
+                        self.rate = 0.01
+                    elif volume > 100:
+                        self.rate = 0.1
+                    else:
+                        self.rate = 1.0
+
+                self._count_this_second += 1
+            import random
+
+            return random.random() < self.rate
+
+        return True  # type: ignore[unreachable]
+
+
+# =============================================================================
+# =============================================================================
+
+
+class LogBuffer:
+    """Buffers log messages for batch writing.
+
+    References:
+        LOG-016: Log Buffer for Batch Writing
+    """
+
+    def __init__(self, max_size: int = 1000, flush_interval_seconds: float = 5.0):
+        self.max_size = max_size
+        self.flush_interval = flush_interval_seconds
+        self._buffer: queue.Queue = queue.Queue(maxsize=max_size)  # type: ignore[type-arg]
+        self._handlers: list[Callable[[list[logging.LogRecord]], None]] = []
+        self._flush_thread: threading.Thread | None = None
+        self._running = False
+
+    def add(self, record: logging.LogRecord) -> None:
+        """Add record to buffer."""
+        try:
+            self._buffer.put_nowait(record)
+        except queue.Full:
+            # Buffer full, force flush
+            self.flush()
+            self._buffer.put_nowait(record)
+
+    def flush(self) -> None:
+        """Flush buffer to handlers."""
+        records = []
+        while not self._buffer.empty():
+            try:
+                records.append(self._buffer.get_nowait())
+            except queue.Empty:
+                break
+
+        if records:
+            for handler in self._handlers:
+                try:
+                    handler(records)
+                except Exception as e:
+                    logger.warning(f"Buffer flush handler failed: {e}")
+
+    def on_flush(self, handler: Callable[[list[logging.LogRecord]], None]) -> None:
+        """Register flush handler."""
+        self._handlers.append(handler)
+
+    def start_auto_flush(self) -> None:
+        """Start automatic flush thread."""
+        self._running = True
+        self._flush_thread = threading.Thread(target=self._flush_loop, daemon=True)
+        self._flush_thread.start()
+
+    def stop_auto_flush(self) -> None:
+        """Stop automatic flush thread."""
+        self._running = False
+        if self._flush_thread:
+            self._flush_thread.join(timeout=2)
+        self.flush()
+
+    def _flush_loop(self) -> None:
+        """Periodic flush loop."""
+        while self._running:
+            time.sleep(self.flush_interval)
+            self.flush()
+
+
+# =============================================================================
+# =============================================================================
+
+
+class CompressedLogHandler(logging.Handler):
+    """Handler that writes compressed logs.
+
+    References:
+        LOG-017: Log Compression
+    """
+
+    def __init__(
+        self,
+        filename: str,
+        max_bytes: int = 10_000_000,
+        backup_count: int = 5,
+        compression_level: int = 9,
+    ):
+        super().__init__()
+        self.filename = filename
+        self.max_bytes = max_bytes
+        self.backup_count = backup_count
+        self.compression_level = compression_level
+        self._current_file: Any = None
+        self._current_size = 0
+        self._lock = threading.Lock()
+
+    def emit(self, record: logging.LogRecord) -> None:
+        """Emit log record."""
+        try:
+            msg = self.format(record) + "\n"
+            msg_bytes = msg.encode("utf-8")
+
+            with self._lock:
+                if self._current_file is None:
+                    self._open_file()
+
+                if self._current_size + len(msg_bytes) > self.max_bytes:
+                    self._rotate()
+
+                self._current_file.write(msg_bytes)
+                self._current_size += len(msg_bytes)
+
+        except Exception:
+            self.handleError(record)
+
+    def _open_file(self) -> None:
+        """Open current log file."""
+        self._current_file = gzip.open(  # noqa: SIM115
+            f"{self.filename}.gz", "ab", compresslevel=self.compression_level
+        )
+        try:
+            self._current_size = os.path.getsize(f"{self.filename}.gz")  # noqa: PTH202
+        except OSError:
+            self._current_size = 0
+
+    def _rotate(self) -> None:
+        """Rotate log files."""
+        if self._current_file:
+            self._current_file.close()
+
+        # Shift existing backups
+        for i in range(self.backup_count - 1, 0, -1):
+            src = f"{self.filename}.{i}.gz"
+            dst = f"{self.filename}.{i + 1}.gz"
+            if os.path.exists(src):
+                os.rename(src, dst)  # noqa: PTH104
+
+        # Move current to .1
+        if os.path.exists(f"{self.filename}.gz"):
+            os.rename(f"{self.filename}.gz", f"{self.filename}.1.gz")  # noqa: PTH104
+
+        self._open_file()
+
+    def close(self) -> None:
+        """Close handler."""
+        with self._lock:
+            if self._current_file:
+                self._current_file.close()
+                self._current_file = None
+        super().close()
+
+
+# =============================================================================
+# =============================================================================
+
+
+class EncryptedLogHandler(logging.Handler):
+    """Handler that writes encrypted logs.
+
+    Uses simple XOR encryption for demonstration.
+    In production, use proper encryption (AES, etc.).
+
+    References:
+        LOG-018: Log Encryption
+    """
+
+    def __init__(self, filename: str, key: str):
+        super().__init__()
+        self.filename = filename
+        self._key = hashlib.sha256(key.encode()).digest()
+        self._file: Any = None
+        self._lock = threading.Lock()
+
+    def emit(self, record: logging.LogRecord) -> None:
+        """Emit encrypted log record."""
+        try:
+            msg = self.format(record) + "\n"
+            encrypted = self._encrypt(msg.encode("utf-8"))
+
+            with self._lock:
+                if self._file is None:
+                    self._file = open(self.filename, "ab")  # noqa: SIM115
+
+                # Write length-prefixed encrypted message
+                length = len(encrypted).to_bytes(4, "big")
+                self._file.write(length + encrypted)
+                self._file.flush()
+
+        except Exception:
+            self.handleError(record)
+
+    def _encrypt(self, data: bytes) -> bytes:
+        """Encrypt data with XOR."""
+        encrypted = bytearray()
+        for i, byte in enumerate(data):
+            encrypted.append(byte ^ self._key[i % len(self._key)])
+        return bytes(encrypted)
+
+    def close(self) -> None:
+        """Close handler."""
+        with self._lock:
+            if self._file:
+                self._file.close()
+                self._file = None
+        super().close()
+
+
+# =============================================================================
+# =============================================================================
+
+
+class LogForwarderProtocol(Enum):
+    """Log forwarding protocols."""
+
+    SYSLOG = auto()
+    HTTP = auto()
+    TCP = auto()
+    UDP = auto()
+
+
+@dataclass
+class ForwardingConfig:
+    """Log forwarding configuration.
+
+    References:
+        LOG-019: Log Forwarding
+    """
+
+    protocol: LogForwarderProtocol
+    host: str
+    port: int
+    timeout: float = 5.0
+    batch_size: int = 100
+    tls: bool = False
+
+
+class LogForwarder:
+    """Forwards logs to external systems.
+
+    References:
+        LOG-019: Log Forwarding
+    """
+
+    def __init__(self, config: ForwardingConfig):
+        self.config = config
+        self._buffer: list[dict[str, Any]] = []
+        self._lock = threading.Lock()
+
+    def forward(self, record: logging.LogRecord) -> None:
+        """Forward log record.
+
+        Args:
+            record: Log record
+        """
+        entry = {
+            "timestamp": datetime.now().isoformat(),
+            "level": record.levelname,
+            "logger": record.name,
+            "message": record.getMessage(),
+            "hostname": os.uname().nodename if hasattr(os, "uname") else "unknown",
+        }
+
+        with self._lock:
+            self._buffer.append(entry)
+            if len(self._buffer) >= self.config.batch_size:
+                self._flush()
+
+    def _flush(self) -> None:
+        """Flush buffer to destination."""
+        if not self._buffer:
+            return
+
+        entries = self._buffer.copy()
+        self._buffer.clear()
+
+        try:
+            if self.config.protocol == LogForwarderProtocol.HTTP:
+                self._send_http(entries)
+            elif self.config.protocol == LogForwarderProtocol.SYSLOG:
+                self._send_syslog(entries)
+            elif self.config.protocol == LogForwarderProtocol.TCP:
+                self._send_tcp(entries)
+            elif self.config.protocol == LogForwarderProtocol.UDP:
+                self._send_udp(entries)
+        except Exception as e:
+            logger.warning(f"Log forwarding failed: {e}")
+            # Put entries back in buffer
+            self._buffer.extend(entries)
+
+    def _send_http(self, entries: list[dict[str, Any]]) -> None:
+        """Send via HTTP."""
+        import urllib.request
+
+        data = json.dumps(entries).encode("utf-8")
+        req = urllib.request.Request(
+            f"{'https' if self.config.tls else 'http'}://{self.config.host}:{self.config.port}/logs",
+            data=data,
+            headers={"Content-Type": "application/json"},
+        )
+        urllib.request.urlopen(req, timeout=self.config.timeout)
+
+    def _send_syslog(self, entries: list[dict[str, Any]]) -> None:
+        """Send via syslog."""
+        import socket
+
+        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+        for entry in entries:
+            msg = f"<14>{entry['timestamp']} {entry['logger']}: {entry['message']}"
+            sock.sendto(msg.encode(), (self.config.host, self.config.port))
+        sock.close()
+
+    def _send_tcp(self, entries: list[dict[str, Any]]) -> None:
+        """Send via TCP."""
+        import socket
+
+        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        sock.settimeout(self.config.timeout)
+        sock.connect((self.config.host, self.config.port))
+        for entry in entries:
+            msg = json.dumps(entry) + "\n"
+            sock.send(msg.encode())
+        sock.close()
+
+    def _send_udp(self, entries: list[dict[str, Any]]) -> None:
+        """Send via UDP."""
+        import socket
+
+        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+        for entry in entries:
+            msg = json.dumps(entry)
+            sock.sendto(msg.encode(), (self.config.host, self.config.port))
+        sock.close()
+
+
+# =============================================================================
+# =============================================================================
+
+
+@dataclass
+class DashboardMetrics:
+    """Metrics for log visualization dashboard.
+
+    References:
+        LOG-020: Log Visualization Dashboard Data
+    """
+
+    total_logs: int = 0
+    logs_by_level: dict[str, int] = field(default_factory=dict)
+    logs_by_logger: dict[str, int] = field(default_factory=dict)
+    logs_per_minute: list[int] = field(default_factory=list)
+    error_rate: float = 0.0
+    top_patterns: list[tuple[str, int]] = field(default_factory=list)
+    recent_errors: list[dict[str, Any]] = field(default_factory=list)
+
+
+class LogDashboardCollector:
+    """Collects metrics for log visualization.
+
+    References:
+        LOG-020: Log Visualization Dashboard Data
+    """
+
+    def __init__(self, window_minutes: int = 60):
+        self.window_minutes = window_minutes
+        self._logs: deque = deque()  # type: ignore[type-arg]
+        self._lock = threading.Lock()
+
+    def add(self, record: logging.LogRecord) -> None:
+        """Add log record to metrics."""
+        entry = {
+            "timestamp": datetime.now(),
+            "level": record.levelname,
+            "logger": record.name,
+            "message": record.getMessage(),
+        }
+
+        with self._lock:
+            self._logs.append(entry)
+            # Trim old entries
+            cutoff = datetime.now() - timedelta(minutes=self.window_minutes)
+            while self._logs and self._logs[0]["timestamp"] < cutoff:
+                self._logs.popleft()
+
+    def get_metrics(self) -> DashboardMetrics:
+        """Get current dashboard metrics.
+
+        Returns:
+            Dashboard metrics
+        """
+        with self._lock:
+            logs = list(self._logs)
+
+        if not logs:
+            return DashboardMetrics()
+
+        # Count by level
+        level_counts = Counter(log["level"] for log in logs)
+
+        # Count by logger
+        logger_counts = Counter(log["logger"] for log in logs)
+
+        # Logs per minute
+        now = datetime.now()
+        per_minute = []
+        for i in range(60):
+            minute_start = now - timedelta(minutes=i + 1)
+            minute_end = now - timedelta(minutes=i)
+            count = sum(1 for log in logs if minute_start <= log["timestamp"] < minute_end)
+            per_minute.append(count)
+        per_minute.reverse()
+
+        # Error rate
+        error_count = sum(1 for log in logs if log["level"] in ("ERROR", "CRITICAL"))
+        error_rate = error_count / len(logs) if logs else 0.0
+
+        # Top patterns
+        patterns: Counter[str] = Counter()
+        for log in logs:
+            normalized = re.sub(r"\d+", "<N>", log["message"])
+            patterns[normalized] += 1
+
+        # Recent errors
+        recent_errors = [log for log in logs if log["level"] in ("ERROR", "CRITICAL")][-10:]
+
+        return DashboardMetrics(
+            total_logs=len(logs),
+            logs_by_level=dict(level_counts),
+            logs_by_logger=dict(logger_counts.most_common(10)),
+            logs_per_minute=per_minute,
+            error_rate=error_rate,
+            top_patterns=patterns.most_common(10),
+            recent_errors=recent_errors,
+        )
+
+
+__all__ = [
+    # Aggregation (LOG-009)
+    "AggregatedLogEntry",
+    # Alerting (LOG-012)
+    "AlertSeverity",
+    # Compression (LOG-017)
+    "CompressedLogHandler",
+    # Dashboard (LOG-020)
+    "DashboardMetrics",
+    # Encryption (LOG-018)
+    "EncryptedLogHandler",
+    # Forwarding (LOG-019)
+    "ForwardingConfig",
+    "LogAggregator",
+    "LogAlert",
+    "LogAlerter",
+    # Analysis (LOG-010)
+    "LogAnalyzer",
+    # Buffer (LOG-016)
+    "LogBuffer",
+    "LogDashboardCollector",
+    "LogForwarder",
+    "LogForwarderProtocol",
+    "LogPattern",
+    # Sampling (LOG-015)
+    "LogSampler",
+    "SamplingStrategy",
+    "TriggeredAlert",
+]
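
The hunk above appears to belong to oscura/core/logging_advanced.py, which is the only file in the listing with +952 lines. For orientation only, here is a minimal sketch of how these classes could be attached to the standard logging machinery: the FanOutHandler class, its thresholds, and the print-based alert handler are illustrative assumptions and are not part of the package; only the imported names come from the diff.

import logging

from oscura.core.logging_advanced import (
    AlertSeverity,
    LogAggregator,
    LogAlerter,
    LogSampler,
    SamplingStrategy,
)


class FanOutHandler(logging.Handler):
    # Hypothetical glue class (not in the diff): sample records, then feed
    # the aggregator and the alerter from a single logging.Handler.
    def __init__(self) -> None:
        super().__init__()
        self.sampler = LogSampler(strategy=SamplingStrategy.RATE_LIMIT, max_per_second=50)
        self.aggregator = LogAggregator(window_seconds=300, min_count=2)
        self.alerter = LogAlerter()
        # Alert on any ERROR-or-above record, at most once per minute.
        self.alerter.add_alert(
            "any_error",
            lambda r: r.levelno >= logging.ERROR,
            severity=AlertSeverity.ERROR,
            cooldown_seconds=60,
        )
        self.alerter.on_alert(
            lambda a: print(f"ALERT {a.alert.name}: {a.record.getMessage()}")
        )

    def emit(self, record: logging.LogRecord) -> None:
        # Drop rate-limited records; errors always pass per LogSampler.should_log.
        if not self.sampler.should_log(record):
            return
        self.aggregator.add(record)
        self.alerter.check(record)


if __name__ == "__main__":
    handler = FanOutHandler()
    log = logging.getLogger("demo")
    log.setLevel(logging.INFO)
    log.addHandler(handler)

    for i in range(200):
        log.info("processed item %d", i)
    log.error("backend unreachable")

    # Grouped messages with counts, numbers normalized to <NUM>.
    for entry in handler.aggregator.get_summary():
        print(entry.count, entry.key)

In the same spirit, CompressedLogHandler or EncryptedLogHandler could be added to the logger alongside this handler, and a LogForwarder(ForwardingConfig(...)) could be fed from a LogBuffer flush callback; those wirings are equally speculative and follow only the signatures visible in the hunk.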