oscura-0.5.0-py3-none-any.whl → oscura-0.6.0-py3-none-any.whl
This diff shows the changes between publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
- oscura/__init__.py +169 -167
- oscura/analyzers/__init__.py +3 -0
- oscura/analyzers/classification.py +659 -0
- oscura/analyzers/digital/__init__.py +0 -48
- oscura/analyzers/digital/edges.py +325 -65
- oscura/analyzers/digital/extraction.py +0 -195
- oscura/analyzers/digital/quality.py +293 -166
- oscura/analyzers/digital/timing.py +260 -115
- oscura/analyzers/digital/timing_numba.py +334 -0
- oscura/analyzers/entropy.py +605 -0
- oscura/analyzers/eye/diagram.py +176 -109
- oscura/analyzers/eye/metrics.py +5 -5
- oscura/analyzers/jitter/__init__.py +6 -4
- oscura/analyzers/jitter/ber.py +52 -52
- oscura/analyzers/jitter/classification.py +156 -0
- oscura/analyzers/jitter/decomposition.py +163 -113
- oscura/analyzers/jitter/spectrum.py +80 -64
- oscura/analyzers/ml/__init__.py +39 -0
- oscura/analyzers/ml/features.py +600 -0
- oscura/analyzers/ml/signal_classifier.py +604 -0
- oscura/analyzers/packet/daq.py +246 -158
- oscura/analyzers/packet/parser.py +12 -1
- oscura/analyzers/packet/payload.py +50 -2110
- oscura/analyzers/packet/payload_analysis.py +361 -181
- oscura/analyzers/packet/payload_patterns.py +133 -70
- oscura/analyzers/packet/stream.py +84 -23
- oscura/analyzers/patterns/__init__.py +26 -5
- oscura/analyzers/patterns/anomaly_detection.py +908 -0
- oscura/analyzers/patterns/clustering.py +169 -108
- oscura/analyzers/patterns/clustering_optimized.py +227 -0
- oscura/analyzers/patterns/discovery.py +1 -1
- oscura/analyzers/patterns/matching.py +581 -197
- oscura/analyzers/patterns/pattern_mining.py +778 -0
- oscura/analyzers/patterns/periodic.py +121 -38
- oscura/analyzers/patterns/sequences.py +175 -78
- oscura/analyzers/power/conduction.py +1 -1
- oscura/analyzers/power/soa.py +6 -6
- oscura/analyzers/power/switching.py +250 -110
- oscura/analyzers/protocol/__init__.py +17 -1
- oscura/analyzers/protocols/__init__.py +1 -22
- oscura/analyzers/protocols/base.py +6 -6
- oscura/analyzers/protocols/ble/__init__.py +38 -0
- oscura/analyzers/protocols/ble/analyzer.py +809 -0
- oscura/analyzers/protocols/ble/uuids.py +288 -0
- oscura/analyzers/protocols/can.py +257 -127
- oscura/analyzers/protocols/can_fd.py +107 -80
- oscura/analyzers/protocols/flexray.py +139 -80
- oscura/analyzers/protocols/hdlc.py +93 -58
- oscura/analyzers/protocols/i2c.py +247 -106
- oscura/analyzers/protocols/i2s.py +138 -86
- oscura/analyzers/protocols/industrial/__init__.py +40 -0
- oscura/analyzers/protocols/industrial/bacnet/__init__.py +33 -0
- oscura/analyzers/protocols/industrial/bacnet/analyzer.py +708 -0
- oscura/analyzers/protocols/industrial/bacnet/encoding.py +412 -0
- oscura/analyzers/protocols/industrial/bacnet/services.py +622 -0
- oscura/analyzers/protocols/industrial/ethercat/__init__.py +30 -0
- oscura/analyzers/protocols/industrial/ethercat/analyzer.py +474 -0
- oscura/analyzers/protocols/industrial/ethercat/mailbox.py +339 -0
- oscura/analyzers/protocols/industrial/ethercat/topology.py +166 -0
- oscura/analyzers/protocols/industrial/modbus/__init__.py +31 -0
- oscura/analyzers/protocols/industrial/modbus/analyzer.py +525 -0
- oscura/analyzers/protocols/industrial/modbus/crc.py +79 -0
- oscura/analyzers/protocols/industrial/modbus/functions.py +436 -0
- oscura/analyzers/protocols/industrial/opcua/__init__.py +21 -0
- oscura/analyzers/protocols/industrial/opcua/analyzer.py +552 -0
- oscura/analyzers/protocols/industrial/opcua/datatypes.py +446 -0
- oscura/analyzers/protocols/industrial/opcua/services.py +264 -0
- oscura/analyzers/protocols/industrial/profinet/__init__.py +23 -0
- oscura/analyzers/protocols/industrial/profinet/analyzer.py +441 -0
- oscura/analyzers/protocols/industrial/profinet/dcp.py +263 -0
- oscura/analyzers/protocols/industrial/profinet/ptcp.py +200 -0
- oscura/analyzers/protocols/jtag.py +180 -98
- oscura/analyzers/protocols/lin.py +219 -114
- oscura/analyzers/protocols/manchester.py +4 -4
- oscura/analyzers/protocols/onewire.py +253 -149
- oscura/analyzers/protocols/parallel_bus/__init__.py +20 -0
- oscura/analyzers/protocols/parallel_bus/centronics.py +92 -0
- oscura/analyzers/protocols/parallel_bus/gpib.py +137 -0
- oscura/analyzers/protocols/spi.py +192 -95
- oscura/analyzers/protocols/swd.py +321 -167
- oscura/analyzers/protocols/uart.py +267 -125
- oscura/analyzers/protocols/usb.py +235 -131
- oscura/analyzers/side_channel/power.py +17 -12
- oscura/analyzers/signal/__init__.py +15 -0
- oscura/analyzers/signal/timing_analysis.py +1086 -0
- oscura/analyzers/signal_integrity/__init__.py +4 -1
- oscura/analyzers/signal_integrity/sparams.py +2 -19
- oscura/analyzers/spectral/chunked.py +129 -60
- oscura/analyzers/spectral/chunked_fft.py +300 -94
- oscura/analyzers/spectral/chunked_wavelet.py +100 -80
- oscura/analyzers/statistical/checksum.py +376 -217
- oscura/analyzers/statistical/classification.py +229 -107
- oscura/analyzers/statistical/entropy.py +78 -53
- oscura/analyzers/statistics/correlation.py +407 -211
- oscura/analyzers/statistics/outliers.py +2 -2
- oscura/analyzers/statistics/streaming.py +30 -5
- oscura/analyzers/validation.py +216 -101
- oscura/analyzers/waveform/measurements.py +9 -0
- oscura/analyzers/waveform/measurements_with_uncertainty.py +31 -15
- oscura/analyzers/waveform/spectral.py +500 -228
- oscura/api/__init__.py +31 -5
- oscura/api/dsl/__init__.py +582 -0
- oscura/{dsl → api/dsl}/commands.py +43 -76
- oscura/{dsl → api/dsl}/interpreter.py +26 -51
- oscura/{dsl → api/dsl}/parser.py +107 -77
- oscura/{dsl → api/dsl}/repl.py +2 -2
- oscura/api/dsl.py +1 -1
- oscura/{integrations → api/integrations}/__init__.py +1 -1
- oscura/{integrations → api/integrations}/llm.py +201 -102
- oscura/api/operators.py +3 -3
- oscura/api/optimization.py +144 -30
- oscura/api/rest_server.py +921 -0
- oscura/api/server/__init__.py +17 -0
- oscura/api/server/dashboard.py +850 -0
- oscura/api/server/static/README.md +34 -0
- oscura/api/server/templates/base.html +181 -0
- oscura/api/server/templates/export.html +120 -0
- oscura/api/server/templates/home.html +284 -0
- oscura/api/server/templates/protocols.html +58 -0
- oscura/api/server/templates/reports.html +43 -0
- oscura/api/server/templates/session_detail.html +89 -0
- oscura/api/server/templates/sessions.html +83 -0
- oscura/api/server/templates/waveforms.html +73 -0
- oscura/automotive/__init__.py +8 -1
- oscura/automotive/can/__init__.py +10 -0
- oscura/automotive/can/checksum.py +3 -1
- oscura/automotive/can/dbc_generator.py +590 -0
- oscura/automotive/can/message_wrapper.py +121 -74
- oscura/automotive/can/patterns.py +98 -21
- oscura/automotive/can/session.py +292 -56
- oscura/automotive/can/state_machine.py +6 -3
- oscura/automotive/can/stimulus_response.py +97 -75
- oscura/automotive/dbc/__init__.py +10 -2
- oscura/automotive/dbc/generator.py +84 -56
- oscura/automotive/dbc/parser.py +6 -6
- oscura/automotive/dtc/data.json +2763 -0
- oscura/automotive/dtc/database.py +2 -2
- oscura/automotive/flexray/__init__.py +31 -0
- oscura/automotive/flexray/analyzer.py +504 -0
- oscura/automotive/flexray/crc.py +185 -0
- oscura/automotive/flexray/fibex.py +449 -0
- oscura/automotive/j1939/__init__.py +45 -8
- oscura/automotive/j1939/analyzer.py +605 -0
- oscura/automotive/j1939/spns.py +326 -0
- oscura/automotive/j1939/transport.py +306 -0
- oscura/automotive/lin/__init__.py +47 -0
- oscura/automotive/lin/analyzer.py +612 -0
- oscura/automotive/loaders/blf.py +13 -2
- oscura/automotive/loaders/csv_can.py +143 -72
- oscura/automotive/loaders/dispatcher.py +50 -2
- oscura/automotive/loaders/mdf.py +86 -45
- oscura/automotive/loaders/pcap.py +111 -61
- oscura/automotive/uds/__init__.py +4 -0
- oscura/automotive/uds/analyzer.py +725 -0
- oscura/automotive/uds/decoder.py +140 -58
- oscura/automotive/uds/models.py +7 -1
- oscura/automotive/visualization.py +1 -1
- oscura/cli/analyze.py +348 -0
- oscura/cli/batch.py +142 -122
- oscura/cli/benchmark.py +275 -0
- oscura/cli/characterize.py +137 -82
- oscura/cli/compare.py +224 -131
- oscura/cli/completion.py +250 -0
- oscura/cli/config_cmd.py +361 -0
- oscura/cli/decode.py +164 -87
- oscura/cli/export.py +286 -0
- oscura/cli/main.py +115 -31
- oscura/{onboarding → cli/onboarding}/__init__.py +3 -3
- oscura/{onboarding → cli/onboarding}/help.py +80 -58
- oscura/{onboarding → cli/onboarding}/tutorials.py +97 -72
- oscura/{onboarding → cli/onboarding}/wizard.py +55 -36
- oscura/cli/progress.py +147 -0
- oscura/cli/shell.py +157 -135
- oscura/cli/validate_cmd.py +204 -0
- oscura/cli/visualize.py +158 -0
- oscura/convenience.py +125 -79
- oscura/core/__init__.py +4 -2
- oscura/core/backend_selector.py +3 -3
- oscura/core/cache.py +126 -15
- oscura/core/cancellation.py +1 -1
- oscura/{config → core/config}/__init__.py +20 -11
- oscura/{config → core/config}/defaults.py +1 -1
- oscura/{config → core/config}/loader.py +7 -5
- oscura/{config → core/config}/memory.py +5 -5
- oscura/{config → core/config}/migration.py +1 -1
- oscura/{config → core/config}/pipeline.py +99 -23
- oscura/{config → core/config}/preferences.py +1 -1
- oscura/{config → core/config}/protocol.py +3 -3
- oscura/{config → core/config}/schema.py +426 -272
- oscura/{config → core/config}/settings.py +1 -1
- oscura/{config → core/config}/thresholds.py +195 -153
- oscura/core/correlation.py +5 -6
- oscura/core/cross_domain.py +0 -2
- oscura/core/debug.py +9 -5
- oscura/{extensibility → core/extensibility}/docs.py +158 -70
- oscura/{extensibility → core/extensibility}/extensions.py +160 -76
- oscura/{extensibility → core/extensibility}/logging.py +1 -1
- oscura/{extensibility → core/extensibility}/measurements.py +1 -1
- oscura/{extensibility → core/extensibility}/plugins.py +1 -1
- oscura/{extensibility → core/extensibility}/templates.py +73 -3
- oscura/{extensibility → core/extensibility}/validation.py +1 -1
- oscura/core/gpu_backend.py +11 -7
- oscura/core/log_query.py +101 -11
- oscura/core/logging.py +126 -54
- oscura/core/logging_advanced.py +5 -5
- oscura/core/memory_limits.py +108 -70
- oscura/core/memory_monitor.py +2 -2
- oscura/core/memory_progress.py +7 -7
- oscura/core/memory_warnings.py +1 -1
- oscura/core/numba_backend.py +13 -13
- oscura/{plugins → core/plugins}/__init__.py +9 -9
- oscura/{plugins → core/plugins}/base.py +7 -7
- oscura/{plugins → core/plugins}/cli.py +3 -3
- oscura/{plugins → core/plugins}/discovery.py +186 -106
- oscura/{plugins → core/plugins}/lifecycle.py +1 -1
- oscura/{plugins → core/plugins}/manager.py +7 -7
- oscura/{plugins → core/plugins}/registry.py +3 -3
- oscura/{plugins → core/plugins}/versioning.py +1 -1
- oscura/core/progress.py +16 -1
- oscura/core/provenance.py +8 -2
- oscura/{schemas → core/schemas}/__init__.py +2 -2
- oscura/core/schemas/bus_configuration.json +322 -0
- oscura/core/schemas/device_mapping.json +182 -0
- oscura/core/schemas/packet_format.json +418 -0
- oscura/core/schemas/protocol_definition.json +363 -0
- oscura/core/types.py +4 -0
- oscura/core/uncertainty.py +3 -3
- oscura/correlation/__init__.py +52 -0
- oscura/correlation/multi_protocol.py +811 -0
- oscura/discovery/auto_decoder.py +117 -35
- oscura/discovery/comparison.py +191 -86
- oscura/discovery/quality_validator.py +155 -68
- oscura/discovery/signal_detector.py +196 -79
- oscura/export/__init__.py +18 -20
- oscura/export/kaitai_struct.py +513 -0
- oscura/export/scapy_layer.py +801 -0
- oscura/export/wireshark/README.md +15 -15
- oscura/export/wireshark/generator.py +1 -1
- oscura/export/wireshark/templates/dissector.lua.j2 +2 -2
- oscura/export/wireshark_dissector.py +746 -0
- oscura/guidance/wizard.py +207 -111
- oscura/hardware/__init__.py +19 -0
- oscura/{acquisition → hardware/acquisition}/__init__.py +4 -4
- oscura/{acquisition → hardware/acquisition}/file.py +2 -2
- oscura/{acquisition → hardware/acquisition}/hardware.py +7 -7
- oscura/{acquisition → hardware/acquisition}/saleae.py +15 -12
- oscura/{acquisition → hardware/acquisition}/socketcan.py +1 -1
- oscura/{acquisition → hardware/acquisition}/streaming.py +2 -2
- oscura/{acquisition → hardware/acquisition}/synthetic.py +3 -3
- oscura/{acquisition → hardware/acquisition}/visa.py +33 -11
- oscura/hardware/firmware/__init__.py +29 -0
- oscura/hardware/firmware/pattern_recognition.py +874 -0
- oscura/hardware/hal_detector.py +736 -0
- oscura/hardware/security/__init__.py +37 -0
- oscura/hardware/security/side_channel_detector.py +1126 -0
- oscura/inference/__init__.py +4 -0
- oscura/inference/active_learning/README.md +7 -7
- oscura/inference/active_learning/observation_table.py +4 -1
- oscura/inference/alignment.py +216 -123
- oscura/inference/bayesian.py +113 -33
- oscura/inference/crc_reverse.py +101 -55
- oscura/inference/logic.py +6 -2
- oscura/inference/message_format.py +342 -183
- oscura/inference/protocol.py +95 -44
- oscura/inference/protocol_dsl.py +180 -82
- oscura/inference/signal_intelligence.py +1439 -706
- oscura/inference/spectral.py +99 -57
- oscura/inference/state_machine.py +810 -158
- oscura/inference/stream.py +270 -110
- oscura/iot/__init__.py +34 -0
- oscura/iot/coap/__init__.py +32 -0
- oscura/iot/coap/analyzer.py +668 -0
- oscura/iot/coap/options.py +212 -0
- oscura/iot/lorawan/__init__.py +21 -0
- oscura/iot/lorawan/crypto.py +206 -0
- oscura/iot/lorawan/decoder.py +801 -0
- oscura/iot/lorawan/mac_commands.py +341 -0
- oscura/iot/mqtt/__init__.py +27 -0
- oscura/iot/mqtt/analyzer.py +999 -0
- oscura/iot/mqtt/properties.py +315 -0
- oscura/iot/zigbee/__init__.py +31 -0
- oscura/iot/zigbee/analyzer.py +615 -0
- oscura/iot/zigbee/security.py +153 -0
- oscura/iot/zigbee/zcl.py +349 -0
- oscura/jupyter/display.py +125 -45
- oscura/{exploratory → jupyter/exploratory}/__init__.py +8 -8
- oscura/{exploratory → jupyter/exploratory}/error_recovery.py +298 -141
- oscura/jupyter/exploratory/fuzzy.py +746 -0
- oscura/{exploratory → jupyter/exploratory}/fuzzy_advanced.py +258 -100
- oscura/{exploratory → jupyter/exploratory}/legacy.py +464 -242
- oscura/{exploratory → jupyter/exploratory}/parse.py +167 -145
- oscura/{exploratory → jupyter/exploratory}/recovery.py +119 -87
- oscura/jupyter/exploratory/sync.py +612 -0
- oscura/{exploratory → jupyter/exploratory}/unknown.py +299 -176
- oscura/jupyter/magic.py +4 -4
- oscura/{ui → jupyter/ui}/__init__.py +2 -2
- oscura/{ui → jupyter/ui}/formatters.py +3 -3
- oscura/{ui → jupyter/ui}/progressive_display.py +153 -82
- oscura/loaders/__init__.py +171 -63
- oscura/loaders/binary.py +88 -1
- oscura/loaders/chipwhisperer.py +153 -137
- oscura/loaders/configurable.py +208 -86
- oscura/loaders/csv_loader.py +458 -215
- oscura/loaders/hdf5_loader.py +278 -119
- oscura/loaders/lazy.py +87 -54
- oscura/loaders/mmap_loader.py +1 -1
- oscura/loaders/numpy_loader.py +253 -116
- oscura/loaders/pcap.py +226 -151
- oscura/loaders/rigol.py +110 -49
- oscura/loaders/sigrok.py +201 -78
- oscura/loaders/tdms.py +81 -58
- oscura/loaders/tektronix.py +291 -174
- oscura/loaders/touchstone.py +182 -87
- oscura/loaders/vcd.py +215 -117
- oscura/loaders/wav.py +155 -68
- oscura/reporting/__init__.py +9 -7
- oscura/reporting/analyze.py +352 -146
- oscura/reporting/argument_preparer.py +69 -14
- oscura/reporting/auto_report.py +97 -61
- oscura/reporting/batch.py +131 -58
- oscura/reporting/chart_selection.py +57 -45
- oscura/reporting/comparison.py +63 -17
- oscura/reporting/content/executive.py +76 -24
- oscura/reporting/core_formats/multi_format.py +11 -8
- oscura/reporting/engine.py +312 -158
- oscura/reporting/enhanced_reports.py +949 -0
- oscura/reporting/export.py +86 -43
- oscura/reporting/formatting/numbers.py +69 -42
- oscura/reporting/html.py +139 -58
- oscura/reporting/index.py +137 -65
- oscura/reporting/output.py +158 -67
- oscura/reporting/pdf.py +67 -102
- oscura/reporting/plots.py +191 -112
- oscura/reporting/sections.py +88 -47
- oscura/reporting/standards.py +104 -61
- oscura/reporting/summary_generator.py +75 -55
- oscura/reporting/tables.py +138 -54
- oscura/reporting/templates/enhanced/protocol_re.html +525 -0
- oscura/reporting/templates/index.md +13 -13
- oscura/sessions/__init__.py +14 -23
- oscura/sessions/base.py +3 -3
- oscura/sessions/blackbox.py +106 -10
- oscura/sessions/generic.py +2 -2
- oscura/sessions/legacy.py +783 -0
- oscura/side_channel/__init__.py +63 -0
- oscura/side_channel/dpa.py +1025 -0
- oscura/utils/__init__.py +15 -1
- oscura/utils/autodetect.py +1 -5
- oscura/utils/bitwise.py +118 -0
- oscura/{builders → utils/builders}/__init__.py +1 -1
- oscura/{comparison → utils/comparison}/__init__.py +6 -6
- oscura/{comparison → utils/comparison}/compare.py +202 -101
- oscura/{comparison → utils/comparison}/golden.py +83 -63
- oscura/{comparison → utils/comparison}/limits.py +313 -89
- oscura/{comparison → utils/comparison}/mask.py +151 -45
- oscura/{comparison → utils/comparison}/trace_diff.py +1 -1
- oscura/{comparison → utils/comparison}/visualization.py +147 -89
- oscura/{component → utils/component}/__init__.py +3 -3
- oscura/{component → utils/component}/impedance.py +122 -58
- oscura/{component → utils/component}/reactive.py +165 -168
- oscura/{component → utils/component}/transmission_line.py +3 -3
- oscura/{filtering → utils/filtering}/__init__.py +6 -6
- oscura/{filtering → utils/filtering}/base.py +1 -1
- oscura/{filtering → utils/filtering}/convenience.py +2 -2
- oscura/{filtering → utils/filtering}/design.py +169 -93
- oscura/{filtering → utils/filtering}/filters.py +2 -2
- oscura/{filtering → utils/filtering}/introspection.py +2 -2
- oscura/utils/geometry.py +31 -0
- oscura/utils/imports.py +184 -0
- oscura/utils/lazy.py +1 -1
- oscura/{math → utils/math}/__init__.py +2 -2
- oscura/{math → utils/math}/arithmetic.py +114 -48
- oscura/{math → utils/math}/interpolation.py +139 -106
- oscura/utils/memory.py +129 -66
- oscura/utils/memory_advanced.py +92 -9
- oscura/utils/memory_extensions.py +10 -8
- oscura/{optimization → utils/optimization}/__init__.py +1 -1
- oscura/{optimization → utils/optimization}/search.py +2 -2
- oscura/utils/performance/__init__.py +58 -0
- oscura/utils/performance/caching.py +889 -0
- oscura/utils/performance/lsh_clustering.py +333 -0
- oscura/utils/performance/memory_optimizer.py +699 -0
- oscura/utils/performance/optimizations.py +675 -0
- oscura/utils/performance/parallel.py +654 -0
- oscura/utils/performance/profiling.py +661 -0
- oscura/{pipeline → utils/pipeline}/base.py +1 -1
- oscura/{pipeline → utils/pipeline}/composition.py +11 -3
- oscura/{pipeline → utils/pipeline}/parallel.py +3 -2
- oscura/{pipeline → utils/pipeline}/pipeline.py +1 -1
- oscura/{pipeline → utils/pipeline}/reverse_engineering.py +412 -221
- oscura/{search → utils/search}/__init__.py +3 -3
- oscura/{search → utils/search}/anomaly.py +188 -58
- oscura/utils/search/context.py +294 -0
- oscura/{search → utils/search}/pattern.py +138 -10
- oscura/utils/serial.py +51 -0
- oscura/utils/storage/__init__.py +61 -0
- oscura/utils/storage/database.py +1166 -0
- oscura/{streaming → utils/streaming}/chunked.py +302 -143
- oscura/{streaming → utils/streaming}/progressive.py +1 -1
- oscura/{streaming → utils/streaming}/realtime.py +3 -2
- oscura/{triggering → utils/triggering}/__init__.py +6 -6
- oscura/{triggering → utils/triggering}/base.py +6 -6
- oscura/{triggering → utils/triggering}/edge.py +2 -2
- oscura/{triggering → utils/triggering}/pattern.py +2 -2
- oscura/{triggering → utils/triggering}/pulse.py +115 -74
- oscura/{triggering → utils/triggering}/window.py +2 -2
- oscura/utils/validation.py +32 -0
- oscura/validation/__init__.py +121 -0
- oscura/{compliance → validation/compliance}/__init__.py +5 -5
- oscura/{compliance → validation/compliance}/advanced.py +5 -5
- oscura/{compliance → validation/compliance}/masks.py +1 -1
- oscura/{compliance → validation/compliance}/reporting.py +127 -53
- oscura/{compliance → validation/compliance}/testing.py +114 -52
- oscura/validation/compliance_tests.py +915 -0
- oscura/validation/fuzzer.py +990 -0
- oscura/validation/grammar_tests.py +596 -0
- oscura/validation/grammar_validator.py +904 -0
- oscura/validation/hil_testing.py +977 -0
- oscura/{quality → validation/quality}/__init__.py +4 -4
- oscura/{quality → validation/quality}/ensemble.py +251 -171
- oscura/{quality → validation/quality}/explainer.py +3 -3
- oscura/{quality → validation/quality}/scoring.py +1 -1
- oscura/{quality → validation/quality}/warnings.py +4 -4
- oscura/validation/regression_suite.py +808 -0
- oscura/validation/replay.py +788 -0
- oscura/{testing → validation/testing}/__init__.py +2 -2
- oscura/{testing → validation/testing}/synthetic.py +5 -5
- oscura/visualization/__init__.py +9 -0
- oscura/visualization/accessibility.py +1 -1
- oscura/visualization/annotations.py +64 -67
- oscura/visualization/colors.py +7 -7
- oscura/visualization/digital.py +180 -81
- oscura/visualization/eye.py +236 -85
- oscura/visualization/interactive.py +320 -143
- oscura/visualization/jitter.py +587 -247
- oscura/visualization/layout.py +169 -134
- oscura/visualization/optimization.py +103 -52
- oscura/visualization/palettes.py +1 -1
- oscura/visualization/power.py +427 -211
- oscura/visualization/power_extended.py +626 -297
- oscura/visualization/presets.py +2 -0
- oscura/visualization/protocols.py +495 -181
- oscura/visualization/render.py +79 -63
- oscura/visualization/reverse_engineering.py +171 -124
- oscura/visualization/signal_integrity.py +460 -279
- oscura/visualization/specialized.py +190 -100
- oscura/visualization/spectral.py +670 -255
- oscura/visualization/thumbnails.py +166 -137
- oscura/visualization/waveform.py +150 -63
- oscura/workflows/__init__.py +3 -0
- oscura/{batch → workflows/batch}/__init__.py +5 -5
- oscura/{batch → workflows/batch}/advanced.py +150 -75
- oscura/workflows/batch/aggregate.py +531 -0
- oscura/workflows/batch/analyze.py +236 -0
- oscura/{batch → workflows/batch}/logging.py +2 -2
- oscura/{batch → workflows/batch}/metrics.py +1 -1
- oscura/workflows/complete_re.py +1144 -0
- oscura/workflows/compliance.py +44 -54
- oscura/workflows/digital.py +197 -51
- oscura/workflows/legacy/__init__.py +12 -0
- oscura/{workflow → workflows/legacy}/dag.py +4 -1
- oscura/workflows/multi_trace.py +9 -9
- oscura/workflows/power.py +42 -62
- oscura/workflows/protocol.py +82 -49
- oscura/workflows/reverse_engineering.py +351 -150
- oscura/workflows/signal_integrity.py +157 -82
- oscura-0.6.0.dist-info/METADATA +643 -0
- oscura-0.6.0.dist-info/RECORD +590 -0
- oscura/analyzers/digital/ic_database.py +0 -498
- oscura/analyzers/digital/timing_paths.py +0 -339
- oscura/analyzers/digital/vintage.py +0 -377
- oscura/analyzers/digital/vintage_result.py +0 -148
- oscura/analyzers/protocols/parallel_bus.py +0 -449
- oscura/batch/aggregate.py +0 -300
- oscura/batch/analyze.py +0 -139
- oscura/dsl/__init__.py +0 -73
- oscura/exceptions.py +0 -59
- oscura/exploratory/fuzzy.py +0 -513
- oscura/exploratory/sync.py +0 -384
- oscura/export/wavedrom.py +0 -430
- oscura/exporters/__init__.py +0 -94
- oscura/exporters/csv.py +0 -303
- oscura/exporters/exporters.py +0 -44
- oscura/exporters/hdf5.py +0 -217
- oscura/exporters/html_export.py +0 -701
- oscura/exporters/json_export.py +0 -338
- oscura/exporters/markdown_export.py +0 -367
- oscura/exporters/matlab_export.py +0 -354
- oscura/exporters/npz_export.py +0 -219
- oscura/exporters/spice_export.py +0 -210
- oscura/exporters/vintage_logic_csv.py +0 -247
- oscura/reporting/vintage_logic_report.py +0 -523
- oscura/search/context.py +0 -149
- oscura/session/__init__.py +0 -34
- oscura/session/annotations.py +0 -289
- oscura/session/history.py +0 -313
- oscura/session/session.py +0 -520
- oscura/visualization/digital_advanced.py +0 -718
- oscura/visualization/figure_manager.py +0 -156
- oscura/workflow/__init__.py +0 -13
- oscura-0.5.0.dist-info/METADATA +0 -407
- oscura-0.5.0.dist-info/RECORD +0 -486
- /oscura/core/{config.py → config/legacy.py} +0 -0
- /oscura/{extensibility → core/extensibility}/__init__.py +0 -0
- /oscura/{extensibility → core/extensibility}/registry.py +0 -0
- /oscura/{plugins → core/plugins}/isolation.py +0 -0
- /oscura/{builders → utils/builders}/signal_builder.py +0 -0
- /oscura/{optimization → utils/optimization}/parallel.py +0 -0
- /oscura/{pipeline → utils/pipeline}/__init__.py +0 -0
- /oscura/{streaming → utils/streaming}/__init__.py +0 -0
- {oscura-0.5.0.dist-info → oscura-0.6.0.dist-info}/WHEEL +0 -0
- {oscura-0.5.0.dist-info → oscura-0.6.0.dist-info}/entry_points.txt +0 -0
- {oscura-0.5.0.dist-info → oscura-0.6.0.dist-info}/licenses/LICENSE +0 -0
oscura/utils/performance/caching.py
@@ -0,0 +1,889 @@
"""Caching layer for expensive computations in signal analysis.

This module provides a comprehensive caching system for expensive operations
like FFT, correlation, and protocol decoding. Supports multiple backends
(memory, disk, Redis) with automatic key generation, TTL expiration, LRU
eviction, and cache statistics.

Example:
    >>> cache = CacheManager(backend="memory", max_size_mb=100)
    >>> @cache.cached(ttl=3600)
    ... def expensive_fft(signal: np.ndarray) -> np.ndarray:
    ...     return np.fft.fft(signal)
    >>> result = expensive_fft(my_signal)  # Computed and cached
    >>> result = expensive_fft(my_signal)  # Retrieved from cache

References:
    Cache algorithms: https://en.wikipedia.org/wiki/Cache_replacement_policies
    Redis protocol: https://redis.io/docs/reference/protocol-spec/
"""

from __future__ import annotations

import functools
import hashlib
import hmac
import json
import logging
import pickle
import secrets
import time
from collections import OrderedDict
from dataclasses import asdict, dataclass, field
from enum import Enum
from pathlib import Path
from typing import TYPE_CHECKING, Any, TypeAlias

import numpy as np

from oscura.core.exceptions import SecurityError

if TYPE_CHECKING:
    from collections.abc import Callable

logger = logging.getLogger(__name__)

__all__ = [
    "CacheBackend",
    "CacheEntry",
    "CacheManager",
    "CachePolicy",
    "CacheStats",
]

# Type aliases
CacheKey: TypeAlias = str
CacheValue: TypeAlias = Any


class CacheBackend(Enum):
    """Cache storage backend.

    Attributes:
        MEMORY: In-memory cache using OrderedDict (LRU)
        DISK: Disk-based cache using pickle files
        REDIS: Distributed cache using Redis (optional, graceful degradation)
        MULTI_LEVEL: Memory cache with disk fallback
    """

    MEMORY = "memory"
    DISK = "disk"
    REDIS = "redis"
    MULTI_LEVEL = "multi_level"


class EvictionPolicy(Enum):
    """Cache eviction policy.

    Attributes:
        LRU: Least Recently Used
        LFU: Least Frequently Used
        FIFO: First In First Out
        SIZE_BASED: Evict when size limit reached
    """

    LRU = "lru"
    LFU = "lfu"
    FIFO = "fifo"
    SIZE_BASED = "size_based"


@dataclass
class CacheEntry:
    """Cached data entry with metadata.

    Attributes:
        key: Cache key (hash of function + args)
        value: Cached value
        timestamp: Creation time (Unix timestamp)
        access_count: Number of times accessed
        ttl: Time-to-live in seconds (None = no expiration)
        size_bytes: Approximate size in bytes
        last_access: Last access time (Unix timestamp)
    """

    key: str
    value: Any
    timestamp: float
    access_count: int = 0
    ttl: float | None = None
    size_bytes: int = 0
    last_access: float = field(default_factory=time.time)

    def is_expired(self) -> bool:
        """Check if entry has expired based on TTL.

        Returns:
            True if expired, False otherwise
        """
        if self.ttl is None:
            return False
        return (time.time() - self.timestamp) > self.ttl

    def touch(self) -> None:
        """Update access metadata."""
        self.access_count += 1
        self.last_access = time.time()


@dataclass
class CacheStats:
    """Cache performance statistics.

    Attributes:
        hits: Number of cache hits
        misses: Number of cache misses
        hit_rate: Hit rate (hits / (hits + misses))
        size_mb: Current cache size in megabytes
        entry_count: Number of cached entries
        evictions: Number of entries evicted
        expired: Number of entries expired
        backend: Cache backend type
    """

    hits: int = 0
    misses: int = 0
    hit_rate: float = 0.0
    size_mb: float = 0.0
    entry_count: int = 0
    evictions: int = 0
    expired: int = 0
    backend: str = "memory"

    def __post_init__(self) -> None:
        """Calculate derived statistics."""
        total = self.hits + self.misses
        self.hit_rate = self.hits / total if total > 0 else 0.0

    def to_dict(self) -> dict[str, Any]:
        """Export statistics to dictionary.

        Returns:
            Dictionary with all statistics
        """
        return asdict(self)

    def to_json(self, filepath: str | Path) -> None:
        """Export statistics to JSON file.

        Args:
            filepath: Output JSON file path
        """
        with open(filepath, "w") as f:
            json.dump(self.to_dict(), f, indent=2)


@dataclass
class CachePolicy:
    """Cache behavior policy.

    Attributes:
        ttl: Default time-to-live in seconds (None = no expiration)
        max_size_mb: Maximum cache size in megabytes
        eviction: Eviction policy when size limit reached
        serialize_numpy: Whether to pickle numpy arrays
        compress: Whether to compress cached data
        version: Cache version (invalidate when changed)
    """

    ttl: float | None = 3600.0  # 1 hour default
    max_size_mb: float = 100.0
    eviction: EvictionPolicy = EvictionPolicy.LRU
    serialize_numpy: bool = True
    compress: bool = False
    version: str = "1.0"


class CacheManager:
    """Multi-backend cache manager for expensive computations.

    Manages caching with automatic key generation, TTL expiration, size-based
    eviction, and performance statistics. Supports memory, disk, and Redis backends.

    Args:
        backend: Cache storage backend
        cache_dir: Directory for disk cache (default: ~/.cache/oscura)
        policy: Cache behavior policy
        redis_url: Redis connection URL (for REDIS backend)

    Example:
        >>> cache = CacheManager(backend="memory", policy=CachePolicy(max_size_mb=50))
        >>> @cache.cached(ttl=1800)
        ... def compute_fft(signal: np.ndarray) -> np.ndarray:
        ...     return np.fft.fft(signal)
        >>> result = compute_fft(signal)  # Cached
        >>> stats = cache.get_stats()
        >>> print(f"Hit rate: {stats.hit_rate:.2%}")
    """

    def __init__(
        self,
        backend: str | CacheBackend = CacheBackend.MEMORY,
        cache_dir: str | Path | None = None,
        policy: CachePolicy | None = None,
        redis_url: str | None = None,
    ) -> None:
        """Initialize cache manager.

        Args:
            backend: Cache storage backend
            cache_dir: Directory for disk cache
            policy: Cache behavior policy
            redis_url: Redis connection URL
        """
        self.backend = CacheBackend(backend) if isinstance(backend, str) else backend
        self.policy = policy or CachePolicy()

        # Cache directory setup
        if cache_dir is None:
            self.cache_dir = Path.home() / ".cache" / "oscura" / "performance"
        else:
            self.cache_dir = Path(cache_dir)
        self.cache_dir.mkdir(parents=True, exist_ok=True)

        # Initialize storage
        self._memory_cache: OrderedDict[str, CacheEntry] = OrderedDict()
        self._disk_cache_index: dict[str, Path] = {}
        self._redis_client: Any = None

        # Statistics
        self._hits = 0
        self._misses = 0
        self._evictions = 0
        self._expired = 0

        # Security: HMAC signing key for cache integrity (SEC-003 fix)
        self._cache_key = self._load_or_create_cache_key()

        # Initialize backend
        if self.backend == CacheBackend.REDIS:
            self._init_redis(redis_url)
        elif self.backend == CacheBackend.DISK:
            self._load_disk_index()

    def _init_redis(self, redis_url: str | None) -> None:
        """Initialize Redis connection with graceful degradation.

        Args:
            redis_url: Redis connection URL
        """
        try:
            import redis  # type: ignore[import-not-found]

            self._redis_client = redis.from_url(redis_url or "redis://localhost:6379")
            self._redis_client.ping()
            logger.info("Redis cache backend initialized")
        except ImportError:
            logger.warning("Redis not available, falling back to memory cache")
            self.backend = CacheBackend.MEMORY
        except Exception as e:
            logger.warning(f"Redis connection failed: {e}, falling back to memory cache")
            self.backend = CacheBackend.MEMORY

    def _load_disk_index(self) -> None:
        """Load disk cache index from cache directory."""
        index_file = self.cache_dir / "cache_index.json"
        if index_file.exists():
            try:
                with open(index_file) as f:
                    data = json.load(f)
                self._disk_cache_index = {k: Path(v) for k, v in data.items()}
                logger.info(f"Loaded disk cache index: {len(self._disk_cache_index)} entries")
            except Exception as e:
                logger.warning(f"Failed to load disk cache index: {e}")
                self._disk_cache_index = {}

    def _save_disk_index(self) -> None:
        """Save disk cache index to cache directory."""
        index_file = self.cache_dir / "cache_index.json"
        try:
            with open(index_file, "w") as f:
                data = {k: str(v) for k, v in self._disk_cache_index.items()}
                json.dump(data, f, indent=2)
        except Exception as e:
            logger.warning(f"Failed to save disk cache index: {e}")

    def _load_or_create_cache_key(self) -> bytes:
        """Load or create HMAC signing key for cache integrity.

        Returns:
            256-bit signing key.

        Security:
            SEC-003 fix: Protects cached pickle files from tampering.
            Key is persistent per cache directory and stored with 0o600 permissions.
            Each cache directory has its own unique key.

        References:
            https://owasp.org/www-project-top-ten/
        """
        key_file = self.cache_dir / ".cache_key"

        # Load existing key
        if key_file.exists():
            with open(key_file, "rb") as f:
                return f.read()

        # Create new 256-bit key
        key = secrets.token_bytes(32)

        # Save with restrictive permissions
        with open(key_file, "wb") as f:
            f.write(key)

        # Set owner read/write only (0o600)
        key_file.chmod(0o600)

        logger.info(f"Created new cache signing key: {key_file}")
        return key

    def _generate_key(self, func_name: str, args: tuple[Any, ...], kwargs: dict[str, Any]) -> str:
        """Generate deterministic cache key from function and arguments.

        Args:
            func_name: Function name
            args: Positional arguments
            kwargs: Keyword arguments

        Returns:
            SHA256 hex digest cache key
        """
        key_parts = [self.policy.version, func_name]

        # Hash arguments
        for arg in args:
            key_parts.append(self._hash_value(arg))

        for k, v in sorted(kwargs.items()):
            key_parts.append(f"{k}={self._hash_value(v)}")

        key_str = ":".join(key_parts)
        return hashlib.sha256(key_str.encode()).hexdigest()

    def _hash_value(self, value: Any) -> str:
        """Hash a single value (supports numpy arrays).

        Note:
            Uses MD5 for cache key generation only (not for security).
            MD5 is appropriate here for non-cryptographic checksums.

        Args:
            value: Value to hash

        Returns:
            Hex digest of value
        """
        if isinstance(value, np.ndarray):
            # Hash array data (MD5 used for cache keys only, not security)
            return hashlib.md5(value.tobytes(), usedforsecurity=False).hexdigest()
        elif isinstance(value, (list, tuple)):
            # Hash sequences
            return hashlib.md5(
                str([self._hash_value(v) for v in value]).encode(), usedforsecurity=False
            ).hexdigest()
        elif isinstance(value, dict):
            # Hash dicts
            items = sorted((k, self._hash_value(v)) for k, v in value.items())
            return hashlib.md5(str(items).encode(), usedforsecurity=False).hexdigest()
        else:
            # Hash other types via string representation
            return hashlib.md5(str(value).encode(), usedforsecurity=False).hexdigest()

    def _estimate_size(self, value: Any) -> int:
        """Estimate memory size of value in bytes.

        Args:
            value: Value to measure

        Returns:
            Approximate size in bytes
        """
        if isinstance(value, np.ndarray):
            return value.nbytes
        elif isinstance(value, (str, bytes)):
            return len(value)
        elif isinstance(value, (list, tuple)):
            return sum(self._estimate_size(v) for v in value)
        elif isinstance(value, dict):
            return sum(self._estimate_size(k) + self._estimate_size(v) for k, v in value.items())
        else:
            # Rough estimate using pickle
            try:
                return len(pickle.dumps(value))
            except Exception:
                return 0

    def _get_cache_size_mb(self) -> float:
        """Calculate current cache size in megabytes.

        Returns:
            Cache size in MB
        """
        if self.backend == CacheBackend.MEMORY or self.backend == CacheBackend.MULTI_LEVEL:
            total_bytes = sum(entry.size_bytes for entry in self._memory_cache.values())
            return total_bytes / (1024 * 1024)
        elif self.backend == CacheBackend.DISK:
            total_bytes = sum(
                path.stat().st_size for path in self._disk_cache_index.values() if path.exists()
            )
            return total_bytes / (1024 * 1024)
        return 0.0

    def _evict_if_needed(self) -> None:
        """Evict entries if cache size exceeds limit."""
        while self._get_cache_size_mb() > self.policy.max_size_mb:
            if self.backend == CacheBackend.MEMORY or self.backend == CacheBackend.MULTI_LEVEL:
                if not self._memory_cache:
                    break

                if self.policy.eviction == EvictionPolicy.LRU:
                    # Remove least recently used (first in OrderedDict)
                    self._memory_cache.popitem(last=False)
                elif self.policy.eviction == EvictionPolicy.FIFO:
                    # Remove oldest entry
                    self._memory_cache.popitem(last=False)
                else:
                    # Default to LRU
                    self._memory_cache.popitem(last=False)

                self._evictions += 1
            elif self.backend == CacheBackend.DISK:
                if not self._disk_cache_index:
                    break

                # Remove oldest file
                oldest_key = next(iter(self._disk_cache_index))
                oldest_path = self._disk_cache_index[oldest_key]
                if oldest_path.exists():
                    oldest_path.unlink()
                del self._disk_cache_index[oldest_key]
                self._evictions += 1
            else:
                break

    def _memory_get(self, key: str) -> CacheEntry | None:
        """Get entry from memory cache.

        Args:
            key: Cache key

        Returns:
            Cache entry or None if not found/expired
        """
        if key not in self._memory_cache:
            return None

        entry = self._memory_cache[key]

        # Check expiration
        if entry.is_expired():
            del self._memory_cache[key]
            self._expired += 1
            return None

        # Update LRU order
        self._memory_cache.move_to_end(key)
        entry.touch()
        return entry

    def _memory_set(self, key: str, entry: CacheEntry) -> None:
        """Set entry in memory cache.

        Args:
            key: Cache key
            entry: Cache entry
        """
        self._memory_cache[key] = entry
        self._memory_cache.move_to_end(key)
        self._evict_if_needed()

    def _disk_get(self, key: str) -> CacheEntry | None:
        """Get entry from disk cache with HMAC verification.

        Args:
            key: Cache key

        Returns:
            Cache entry or None if not found/expired

        Raises:
            SecurityError: If HMAC verification fails (tampered cache file)

        Security:
            SEC-003 fix: Verifies HMAC-SHA256 signature before unpickling.
            Prevents code execution from tampered cache files.
            Uses constant-time comparison (hmac.compare_digest).
        """
        if key not in self._disk_cache_index:
            return None

        cache_file = self._disk_cache_index[key]
        if not cache_file.exists():
            del self._disk_cache_index[key]
            return None

        try:
            with open(cache_file, "rb") as f:
                signature = f.read(32)  # SHA256 = 32 bytes
                data = f.read()

            # Verify HMAC signature
            expected_signature = hmac.new(self._cache_key, data, hashlib.sha256).digest()

            if not hmac.compare_digest(signature, expected_signature):
                logger.error(f"Cache integrity check failed for {key}")
                # Delete corrupted cache file
                cache_file.unlink()
                del self._disk_cache_index[key]
                raise SecurityError(
                    f"Cache file integrity verification failed: {key}. "
                    "File may have been tampered with and has been removed."
                )

            # Deserialize only after HMAC verification
            loaded_entry: CacheEntry = pickle.loads(data)

            # Check expiration
            if loaded_entry.is_expired():
                cache_file.unlink()
                del self._disk_cache_index[key]
                self._expired += 1
                return None

            loaded_entry.touch()
            return loaded_entry

        except SecurityError:
            raise  # Re-raise security errors
        except Exception as e:
            logger.warning(f"Failed to load cache entry {key}: {e}")
            if cache_file.exists():
                cache_file.unlink()
            del self._disk_cache_index[key]
            return None

    def _disk_set(self, key: str, entry: CacheEntry) -> None:
        """Set entry in disk cache with HMAC signature.

        Args:
            key: Cache key
            entry: Cache entry

        Security:
            SEC-003 fix: Writes HMAC-SHA256 signature + pickled data.
            Format: [32 bytes signature][pickled data]
            Signature computed over pickled data using self._cache_key.
        """
        cache_file = self.cache_dir / f"{key}.pkl"
        try:
            # Serialize entry
            data = pickle.dumps(entry, protocol=pickle.HIGHEST_PROTOCOL)

            # Compute HMAC-SHA256 signature
            signature = hmac.new(self._cache_key, data, hashlib.sha256).digest()

            # Write signature + data
            with open(cache_file, "wb") as f:
                f.write(signature)  # First 32 bytes
                f.write(data)  # Rest is pickled data

            self._disk_cache_index[key] = cache_file
            self._evict_if_needed()
            self._save_disk_index()
        except Exception as e:
            logger.warning(f"Failed to save cache entry {key}: {e}")

    def get(self, key: str) -> Any | None:
        """Retrieve value from cache.

        Args:
            key: Cache key

        Returns:
            Cached value or None if not found
        """
        entry = None

        if self.backend == CacheBackend.MEMORY:
            entry = self._memory_get(key)
        elif self.backend == CacheBackend.DISK:
            entry = self._disk_get(key)
        elif self.backend == CacheBackend.MULTI_LEVEL:
            # Try memory first, then disk
            entry = self._memory_get(key)
            if entry is None:
                entry = self._disk_get(key)
                # Promote to memory cache
                if entry is not None:
                    self._memory_set(key, entry)
        elif self.backend == CacheBackend.REDIS and self._redis_client:
            try:
                data = self._redis_client.get(key)
                if data:
                    # Security: Only loading from trusted Redis cache
                    loaded_entry: CacheEntry = pickle.loads(data)
                    if not loaded_entry.is_expired():
                        loaded_entry.touch()
                        entry = loaded_entry
                    else:
                        self._redis_client.delete(key)
                        entry = None
                        self._expired += 1
            except Exception as e:
                logger.warning(f"Redis get failed: {e}")

        if entry:
            self._hits += 1
            return entry.value
        else:
            self._misses += 1
            return None

    def set(self, key: str, value: Any, ttl: float | None = None) -> None:
        """Store value in cache.

        Args:
            key: Cache key
            value: Value to cache
            ttl: Time-to-live in seconds (None uses policy default)
        """
        entry = CacheEntry(
            key=key,
            value=value,
            timestamp=time.time(),
            ttl=ttl if ttl is not None else self.policy.ttl,
            size_bytes=self._estimate_size(value),
        )

        if self.backend == CacheBackend.MEMORY:
            self._memory_set(key, entry)
        elif self.backend == CacheBackend.DISK:
            self._disk_set(key, entry)
        elif self.backend == CacheBackend.MULTI_LEVEL:
            self._memory_set(key, entry)
            # Also save to disk for persistence
            self._disk_set(key, entry)
        elif self.backend == CacheBackend.REDIS and self._redis_client:
            try:
                data = pickle.dumps(entry)
                if entry.ttl:
                    self._redis_client.setex(key, int(entry.ttl), data)
                else:
                    self._redis_client.set(key, data)
            except Exception as e:
                logger.warning(f"Redis set failed: {e}")

    def invalidate(self, pattern: str | None = None) -> int:
        """Invalidate cache entries by key pattern.

        Args:
            pattern: Key pattern to match (None = clear all)

        Returns:
            Number of entries invalidated
        """
        if pattern is None:
            return self._clear_all_caches()
        else:
            return self._clear_by_pattern(pattern)

    def _clear_all_caches(self) -> int:
        """Clear all cache entries.

        Returns:
            Number of entries invalidated.
        """
        invalidated = 0

        # Clear memory cache
        if self.backend in (CacheBackend.MEMORY, CacheBackend.MULTI_LEVEL):
            invalidated += len(self._memory_cache)
            self._memory_cache.clear()

        # Clear disk cache
        if self.backend in (CacheBackend.DISK, CacheBackend.MULTI_LEVEL):
            invalidated += self._clear_disk_cache()

        # Clear Redis cache
        if self.backend == CacheBackend.REDIS and self._redis_client:
            self._clear_redis_cache()

        return invalidated

    def _clear_disk_cache(self) -> int:
        """Clear all disk cache entries.

        Returns:
            Number of entries cleared.
        """
        for cache_file in self._disk_cache_index.values():
            if cache_file.exists():
                cache_file.unlink()

        count = len(self._disk_cache_index)
        self._disk_cache_index.clear()
        self._save_disk_index()
        return count

    def _clear_redis_cache(self) -> None:
        """Clear all Redis cache entries."""
        try:
            self._redis_client.flushdb()
        except Exception as e:
            logger.warning(f"Redis flush failed: {e}")

    def _clear_by_pattern(self, pattern: str) -> int:
        """Clear cache entries matching pattern.

        Args:
            pattern: Pattern to match in keys.

        Returns:
            Number of entries invalidated.
        """
        invalidated = 0

        # Clear memory cache by pattern
        if self.backend in (CacheBackend.MEMORY, CacheBackend.MULTI_LEVEL):
            invalidated += self._clear_memory_by_pattern(pattern)

        # Clear disk cache by pattern
        if self.backend in (CacheBackend.DISK, CacheBackend.MULTI_LEVEL):
            invalidated += self._clear_disk_by_pattern(pattern)

        return invalidated

    def _clear_memory_by_pattern(self, pattern: str) -> int:
        """Clear memory cache entries matching pattern.

        Args:
            pattern: Pattern to match.

        Returns:
            Number of entries cleared.
        """
        keys_to_remove = [k for k in self._memory_cache if pattern in k]
        for k in keys_to_remove:
            del self._memory_cache[k]
        return len(keys_to_remove)

    def _clear_disk_by_pattern(self, pattern: str) -> int:
        """Clear disk cache entries matching pattern.

        Args:
            pattern: Pattern to match.

        Returns:
            Number of entries cleared.
        """
        keys_to_remove = [k for k in self._disk_cache_index if pattern in k]
        for k in keys_to_remove:
            cache_file = self._disk_cache_index[k]
            if cache_file.exists():
                cache_file.unlink()
            del self._disk_cache_index[k]

        if keys_to_remove:
            self._save_disk_index()

        return len(keys_to_remove)

    def get_stats(self) -> CacheStats:
        """Get cache performance statistics.

        Returns:
            Cache statistics including hit rate and size
        """
        return CacheStats(
            hits=self._hits,
            misses=self._misses,
            size_mb=self._get_cache_size_mb(),
            entry_count=len(self._memory_cache) + len(self._disk_cache_index)
            if self.backend == CacheBackend.MULTI_LEVEL
            else (
                len(self._memory_cache)
                if self.backend == CacheBackend.MEMORY
                else len(self._disk_cache_index)
            ),
            evictions=self._evictions,
            expired=self._expired,
            backend=self.backend.value,
        )

    def cached(
        self, ttl: float | None = None, key_prefix: str = ""
    ) -> Callable[[Callable[..., Any]], Callable[..., Any]]:
        """Decorator for automatic function result caching.

        Args:
            ttl: Time-to-live in seconds (None uses policy default)
            key_prefix: Prefix for cache keys (useful for versioning)

        Returns:
            Decorated function with caching

        Example:
            >>> cache = CacheManager()
            >>> @cache.cached(ttl=3600)
            ... def expensive_computation(x: np.ndarray) -> np.ndarray:
            ...     return np.fft.fft(x)
        """

        def decorator(func: Callable[..., Any]) -> Callable[..., Any]:
            @functools.wraps(func)
            def wrapper(*args: Any, **kwargs: Any) -> Any:
                # Generate cache key
                func_name = f"{key_prefix}{func.__module__}.{func.__name__}"
                cache_key = self._generate_key(func_name, args, kwargs)

                # Try to get from cache
                cached_value = self.get(cache_key)
                if cached_value is not None:
                    logger.debug(f"Cache hit for {func_name}")
                    return cached_value

                # Compute and cache
                logger.debug(f"Cache miss for {func_name}, computing...")
                result = func(*args, **kwargs)
                self.set(cache_key, result, ttl=ttl)
                return result

            return wrapper

        return decorator


# Global cache instance for convenience
_global_cache: CacheManager | None = None


def get_global_cache() -> CacheManager:
    """Get or create global cache instance.

    Returns:
        Global CacheManager instance
    """
    global _global_cache
    if _global_cache is None:
        _global_cache = CacheManager()
    return _global_cache


def cache(
    ttl: float | None = None, key_prefix: str = ""
) -> Callable[[Callable[..., Any]], Callable[..., Any]]:
    """Convenience decorator using global cache.

    Args:
        ttl: Time-to-live in seconds
        key_prefix: Prefix for cache keys

    Returns:
        Decorated function with caching

    Example:
        >>> @cache(ttl=3600)
        ... def expensive_fft(signal: np.ndarray) -> np.ndarray:
        ...     return np.fft.fft(signal)
    """
    return get_global_cache().cached(ttl=ttl, key_prefix=key_prefix)