rangebar 11.6.1__cp313-cp313-macosx_11_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- rangebar/CLAUDE.md +327 -0
- rangebar/__init__.py +227 -0
- rangebar/__init__.pyi +1089 -0
- rangebar/_core.cpython-313-darwin.so +0 -0
- rangebar/checkpoint.py +472 -0
- rangebar/cli.py +298 -0
- rangebar/clickhouse/CLAUDE.md +139 -0
- rangebar/clickhouse/__init__.py +100 -0
- rangebar/clickhouse/bulk_operations.py +309 -0
- rangebar/clickhouse/cache.py +734 -0
- rangebar/clickhouse/client.py +121 -0
- rangebar/clickhouse/config.py +141 -0
- rangebar/clickhouse/mixin.py +120 -0
- rangebar/clickhouse/preflight.py +504 -0
- rangebar/clickhouse/query_operations.py +345 -0
- rangebar/clickhouse/schema.sql +187 -0
- rangebar/clickhouse/tunnel.py +222 -0
- rangebar/constants.py +288 -0
- rangebar/conversion.py +177 -0
- rangebar/exceptions.py +207 -0
- rangebar/exness.py +364 -0
- rangebar/hooks.py +311 -0
- rangebar/logging.py +171 -0
- rangebar/notify/__init__.py +15 -0
- rangebar/notify/pushover.py +155 -0
- rangebar/notify/telegram.py +271 -0
- rangebar/orchestration/__init__.py +20 -0
- rangebar/orchestration/count_bounded.py +797 -0
- rangebar/orchestration/helpers.py +412 -0
- rangebar/orchestration/models.py +76 -0
- rangebar/orchestration/precompute.py +498 -0
- rangebar/orchestration/range_bars.py +736 -0
- rangebar/orchestration/tick_fetcher.py +226 -0
- rangebar/ouroboros.py +454 -0
- rangebar/processors/__init__.py +22 -0
- rangebar/processors/api.py +383 -0
- rangebar/processors/core.py +522 -0
- rangebar/resource_guard.py +567 -0
- rangebar/storage/__init__.py +22 -0
- rangebar/storage/checksum_registry.py +218 -0
- rangebar/storage/parquet.py +728 -0
- rangebar/streaming.py +300 -0
- rangebar/validation/__init__.py +69 -0
- rangebar/validation/cache_staleness.py +277 -0
- rangebar/validation/continuity.py +664 -0
- rangebar/validation/gap_classification.py +294 -0
- rangebar/validation/post_storage.py +317 -0
- rangebar/validation/tier1.py +175 -0
- rangebar/validation/tier2.py +261 -0
- rangebar-11.6.1.dist-info/METADATA +308 -0
- rangebar-11.6.1.dist-info/RECORD +54 -0
- rangebar-11.6.1.dist-info/WHEEL +4 -0
- rangebar-11.6.1.dist-info/entry_points.txt +2 -0
- rangebar-11.6.1.dist-info/licenses/LICENSE +21 -0
rangebar/hooks.py
ADDED
|
@@ -0,0 +1,311 @@
|
|
|
1
|
+
"""Event hook system for rangebar-py operations.
|
|
2
|
+
|
|
3
|
+
This module provides a publish-subscribe event system for monitoring
|
|
4
|
+
long-running cache operations, validation results, and population progress.
|
|
5
|
+
|
|
6
|
+
Usage
|
|
7
|
+
-----
|
|
8
|
+
>>> from rangebar.hooks import register_hook, HookEvent
|
|
9
|
+
>>>
|
|
10
|
+
>>> def my_callback(payload):
|
|
11
|
+
... print(f"Event: {payload.event.value}, Symbol: {payload.symbol}")
|
|
12
|
+
...
|
|
13
|
+
>>> register_hook(HookEvent.CACHE_WRITE_COMPLETE, my_callback)
|
|
14
|
+
>>> # Later, when bars are cached:
|
|
15
|
+
>>> # Event: cache_write_complete, Symbol: BTCUSDT
|
|
16
|
+
|
|
17
|
+
Events
|
|
18
|
+
------
|
|
19
|
+
- CACHE_WRITE_START: Cache write operation started
|
|
20
|
+
- CACHE_WRITE_COMPLETE: Cache write operation completed successfully
|
|
21
|
+
- CACHE_WRITE_FAILED: Cache write operation failed
|
|
22
|
+
- VALIDATION_COMPLETE: Post-storage validation passed
|
|
23
|
+
- VALIDATION_FAILED: Post-storage validation failed
|
|
24
|
+
- CHECKPOINT_SAVED: Resumable population checkpoint saved
|
|
25
|
+
- POPULATION_COMPLETE: Cache population completed successfully
|
|
26
|
+
- POPULATION_FAILED: Cache population failed
|
|
27
|
+
"""
|
|
28
|
+
|
|
29
|
+
from __future__ import annotations
|
|
30
|
+
|
|
31
|
+
import json
|
|
32
|
+
import logging
|
|
33
|
+
from collections import defaultdict
|
|
34
|
+
from dataclasses import asdict, dataclass, field
|
|
35
|
+
from datetime import UTC, datetime
|
|
36
|
+
from enum import Enum
|
|
37
|
+
from typing import TYPE_CHECKING, Any, Callable
|
|
38
|
+
|
|
39
|
+
if TYPE_CHECKING:
|
|
40
|
+
pass
|
|
41
|
+
|
|
42
|
+
logger = logging.getLogger(__name__)
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
class HookEvent(Enum):
    """Enumeration of lifecycle events delivered to hook subscribers.

    Members
    -------
    CACHE_WRITE_START / CACHE_WRITE_COMPLETE / CACHE_WRITE_FAILED
        Lifecycle of a cache write operation (begin, success, failure).
    VALIDATION_COMPLETE / VALIDATION_FAILED
        Outcome of post-storage validation.
    CHECKPOINT_SAVED
        A resumable population checkpoint was persisted.
    POPULATION_COMPLETE / POPULATION_FAILED
        Final outcome of a cache population run.

    Notes
    -----
    Failure events are identified elsewhere in this module by the
    substring ``FAILED`` in the member *name*, so keep that convention
    when adding members.
    """

    CACHE_WRITE_START = "cache_write_start"
    CACHE_WRITE_COMPLETE = "cache_write_complete"
    CACHE_WRITE_FAILED = "cache_write_failed"
    VALIDATION_COMPLETE = "validation_complete"
    VALIDATION_FAILED = "validation_failed"
    CHECKPOINT_SAVED = "checkpoint_saved"
    POPULATION_COMPLETE = "population_complete"
    POPULATION_FAILED = "population_failed"
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
@dataclass
|
|
79
|
+
class HookPayload:
|
|
80
|
+
"""Payload delivered to hook callbacks.
|
|
81
|
+
|
|
82
|
+
Attributes
|
|
83
|
+
----------
|
|
84
|
+
event : HookEvent
|
|
85
|
+
The event type that triggered this callback.
|
|
86
|
+
symbol : str
|
|
87
|
+
Trading symbol involved (e.g., "BTCUSDT").
|
|
88
|
+
timestamp : datetime
|
|
89
|
+
When the event occurred (UTC).
|
|
90
|
+
details : dict
|
|
91
|
+
Additional event-specific information.
|
|
92
|
+
is_failure : bool
|
|
93
|
+
True if this event represents a failure.
|
|
94
|
+
"""
|
|
95
|
+
|
|
96
|
+
event: HookEvent
|
|
97
|
+
symbol: str
|
|
98
|
+
timestamp: datetime = field(default_factory=lambda: datetime.now(UTC))
|
|
99
|
+
details: dict[str, Any] = field(default_factory=dict)
|
|
100
|
+
is_failure: bool = False
|
|
101
|
+
|
|
102
|
+
def to_dict(self) -> dict[str, Any]:
|
|
103
|
+
"""Convert payload to dictionary for serialization."""
|
|
104
|
+
d = asdict(self)
|
|
105
|
+
d["event"] = self.event.value
|
|
106
|
+
d["timestamp"] = self.timestamp.isoformat()
|
|
107
|
+
return d
|
|
108
|
+
|
|
109
|
+
def to_json(self) -> str:
|
|
110
|
+
"""Convert payload to JSON string."""
|
|
111
|
+
return json.dumps(self.to_dict(), indent=2)
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
# Global hook registry mapping each event to its subscriber callbacks.
# defaultdict(list) so lookups on never-registered events yield an empty list.
_hooks: dict[HookEvent, list[Callable[[HookPayload], None]]] = defaultdict(list)
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
def register_hook(
    event: HookEvent,
    callback: Callable[[HookPayload], None],
) -> None:
    """Subscribe *callback* to *event*.

    Parameters
    ----------
    event : HookEvent
        The event to subscribe to.
    callback : Callable[[HookPayload], None]
        Invoked with a :class:`HookPayload` each time the event fires.

    Examples
    --------
    >>> def log_completion(payload):
    ...     print(f"Completed: {payload.symbol} at {payload.timestamp}")
    ...
    >>> register_hook(HookEvent.CACHE_WRITE_COMPLETE, log_completion)
    """
    subscribers = _hooks[event]
    subscribers.append(callback)
    logger.debug("Registered hook for %s: %s", event.value, callback.__name__)
|
|
140
|
+
|
|
141
|
+
|
|
142
|
+
def unregister_hook(
    event: HookEvent,
    callback: Callable[[HookPayload], None],
) -> bool:
    """Remove a previously registered *callback* for *event*.

    Parameters
    ----------
    event : HookEvent
        The event to unsubscribe from.
    callback : Callable[[HookPayload], None]
        The callback to remove.

    Returns
    -------
    bool
        True if the callback was registered and has been removed,
        False otherwise.
    """
    # EAFP: attempt removal and treat "not present" as the False case.
    try:
        _hooks[event].remove(callback)
    except ValueError:
        return False
    logger.debug("Unregistered hook for %s: %s", event.value, callback.__name__)
    return True
|
|
165
|
+
|
|
166
|
+
|
|
167
|
+
def clear_hooks(event: HookEvent | None = None) -> None:
    """Drop registered hooks for one event, or for all events.

    Parameters
    ----------
    event : HookEvent | None
        Event whose hooks should be removed. If None, the entire
        registry is emptied.
    """
    if event is not None:
        _hooks[event].clear()
        logger.debug("Cleared hooks for %s", event.value)
        return
    _hooks.clear()
    logger.debug("Cleared all hooks")
|
|
181
|
+
|
|
182
|
+
|
|
183
|
+
def emit_hook(
    event: HookEvent,
    symbol: str,
    **details: Any,
) -> None:
    """Emit an event to all registered callbacks.

    Parameters
    ----------
    event : HookEvent
        The event type to emit.
    symbol : str
        Trading symbol involved.
    **details
        Additional event-specific information.

    Notes
    -----
    Callback exceptions are caught and logged but do not propagate, so one
    failing callback cannot break the main operation.  Callbacks are invoked
    against a snapshot of the registry, so a callback may safely register or
    unregister hooks without affecting the current emission.

    Examples
    --------
    >>> emit_hook(
    ...     HookEvent.CACHE_WRITE_COMPLETE,
    ...     symbol="BTCUSDT",
    ...     bars_written=1500,
    ...     threshold_bps=250,
    ... )
    """
    # Add memory snapshot to all hook payloads (Issue #49 T2.3)
    try:
        from rangebar.resource_guard import get_memory_info

        mem = get_memory_info()
        details["memory_rss_mb"] = mem.process_rss_mb
        details["memory_pct"] = round(mem.usage_pct, 3)
    except Exception:
        # Best-effort enrichment only; never let it block the emission.
        logger.debug("Memory snapshot unavailable for hook payload", exc_info=True)

    # Failure events follow the *_FAILED naming convention in HookEvent.
    is_failure = "FAILED" in event.name
    payload = HookPayload(
        event=event,
        symbol=symbol,
        timestamp=datetime.now(UTC),
        details=details,
        is_failure=is_failure,
    )

    # Snapshot the callback list: a callback may (un)register hooks while we
    # iterate, and mutating the live list during iteration would skip entries.
    # .get() (not subscription) avoids creating an empty registry entry.
    callbacks = list(_hooks.get(event, []))
    if not callbacks:
        logger.debug("No hooks registered for %s", event.value)
        return

    logger.debug(
        "Emitting %s for %s to %d callback(s)",
        event.value,
        symbol,
        len(callbacks),
    )

    for callback in callbacks:
        try:
            callback(payload)
        except (OSError, RuntimeError, ValueError, TypeError) as e:
            # Log but don't propagate - callbacks shouldn't break main flow
            logger.warning(
                "Hook callback %s failed for %s: %s",
                callback.__name__,
                event.value,
                e,
            )
|
|
255
|
+
|
|
256
|
+
|
|
257
|
+
def register_for_failures(
    callback: Callable[[HookPayload], None],
) -> None:
    """Subscribe *callback* to every ``*_FAILED`` event.

    Convenience wrapper so callers do not have to enumerate the failure
    events themselves.

    Parameters
    ----------
    callback : Callable[[HookPayload], None]
        Function to call when any failure event occurs.

    Examples
    --------
    >>> def alert_on_failure(payload):
    ...     send_slack_alert(f"FAILURE: {payload.event.value}")
    ...
    >>> register_for_failures(alert_on_failure)
    """
    failure_events = (evt for evt in HookEvent if "FAILED" in evt.name)
    for evt in failure_events:
        register_hook(evt, callback)
|
|
279
|
+
|
|
280
|
+
|
|
281
|
+
def register_for_all(
    callback: Callable[[HookPayload], None],
) -> None:
    """Subscribe *callback* to every event in :class:`HookEvent`.

    Parameters
    ----------
    callback : Callable[[HookPayload], None]
        Function to call when any event occurs.

    Examples
    --------
    >>> def log_all(payload):
    ...     print(f"{payload.event.value}: {payload.symbol}")
    ...
    >>> register_for_all(log_all)
    """
    for evt in HookEvent:
        register_hook(evt, callback)
|
|
300
|
+
|
|
301
|
+
|
|
302
|
+
# Public API of the hooks module (sorted alphabetically).
__all__ = [
    "HookEvent",
    "HookPayload",
    "clear_hooks",
    "emit_hook",
    "register_for_all",
    "register_for_failures",
    "register_hook",
    "unregister_hook",
]
|
rangebar/logging.py
ADDED
|
@@ -0,0 +1,171 @@
|
|
|
1
|
+
"""Centralized NDJSON logging configuration for rangebar-py.
|
|
2
|
+
|
|
3
|
+
Implements GitHub Issue #43: Structured logging for checksum verification
|
|
4
|
+
and other observability events.
|
|
5
|
+
|
|
6
|
+
Logs are stored in the repository tree under `logs/` directory.
|
|
7
|
+
"""
|
|
8
|
+
|
|
9
|
+
from __future__ import annotations
|
|
10
|
+
|
|
11
|
+
import os
|
|
12
|
+
import sys
|
|
13
|
+
from pathlib import Path
|
|
14
|
+
from typing import TYPE_CHECKING
|
|
15
|
+
|
|
16
|
+
if TYPE_CHECKING:
|
|
17
|
+
from loguru import Logger
|
|
18
|
+
|
|
19
|
+
# Logs in repository tree (not platformdirs)
# NOTE(review): Path(__file__).parent.parent.parent only resolves to the
# repo root in a source checkout; for an installed wheel this lands outside
# site-packages -- confirm logs/ is a writable location in deployments.
PROJECT_ROOT = Path(__file__).parent.parent.parent  # rangebar-py/
LOG_DIR = PROJECT_ROOT / "logs"
NDJSON_FILE = LOG_DIR / "events.jsonl"  # structured NDJSON event sink
CHECKSUM_REGISTRY_FILE = LOG_DIR / "checksum_registry.jsonl"  # checksum audit trail

# Lazy initialization flag
# Module-level cache so get_logger() configures its sinks only once per process.
_logger_initialized = False
_logger: Logger | None = None
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
def _get_context_extra() -> dict:
    """Build the context fields attached to every log entry."""
    import socket

    environment = os.environ.get("RANGEBAR_ENV", "development")
    git_sha = os.environ.get("RANGEBAR_GIT_SHA", "unknown")
    return {
        "service": "rangebar-py",
        "environment": environment,
        "git_sha": git_sha,
        "pid": os.getpid(),
        "host": socket.gethostname(),
    }
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
def get_logger() -> Logger:
    """Get the configured logger instance.

    Lazy initialization to avoid import-time side effects.  On first call
    this configures two loguru sinks: an NDJSON file (DEBUG+, rotated and
    compressed) and a human-readable stderr console (INFO+).  Subsequent
    calls return the cached, already-bound logger.

    NOTE(review): the standard-logging fallback below does not set
    ``_logger_initialized``, so without loguru the basicConfig branch
    re-runs on every call (basicConfig is a no-op after the first call).
    """
    global _logger_initialized, _logger

    # Fast path: sinks already configured for this process.
    if _logger_initialized and _logger is not None:
        return _logger

    try:
        from loguru import logger
    except ImportError:
        # Fallback to standard logging if loguru not installed
        import logging

        logging.basicConfig(
            level=logging.INFO,
            format="%(asctime)s | %(levelname)-8s | %(name)s - %(message)s",
        )
        # Return a minimal logger-like object
        return logging.getLogger("rangebar")  # type: ignore[return-value]

    # Ensure log directory exists
    LOG_DIR.mkdir(parents=True, exist_ok=True)

    # Remove default handler
    logger.remove()

    # Add context to every log entry
    context = _get_context_extra()
    logger = logger.bind(**context)

    # NDJSON file sink (in repo tree)
    logger.add(
        NDJSON_FILE,
        format="{message}",
        serialize=True,  # NDJSON output
        rotation="10 MB",
        retention="7 days",
        compression="gz",
        enqueue=True,  # async-safe writes via internal queue
        level="DEBUG",
    )

    # Console (human-readable, INFO+)
    console_format = (
        "<green>{time:HH:mm:ss}</green> | <level>{level: <8}</level> | "
        "<cyan>{extra[component]}</cyan> - <level>{message}</level>"
    )
    # The filter only passes records that bound a "component" field, so the
    # console shows component-tagged events and skips everything else.
    logger.add(
        sys.stderr,
        format=console_format,
        level="INFO",
        colorize=True,
        filter=lambda record: "component" in record["extra"],
    )

    _logger = logger
    _logger_initialized = True
    return logger
|
|
104
|
+
|
|
105
|
+
|
|
106
|
+
def log_checksum_event(
    event_type: str,
    symbol: str,
    date: str,
    trace_id: str,
    **kwargs: object,
) -> None:
    """Emit a structured checksum event to the NDJSON log.

    Args:
        event_type: Type of event (checksum_fetch_start, checksum_verify_success, etc.)
        symbol: Trading symbol (e.g., "BTCUSDT")
        date: Date being processed (YYYY-MM-DD)
        trace_id: Correlation ID for the request chain
        **kwargs: Additional event-specific fields
    """
    bound = get_logger().bind(
        component="checksum",
        event_type=event_type,
        symbol=symbol,
        date=date,
        trace_id=trace_id,
        **kwargs,
    )
    bound.info(f"{event_type}: {symbol} {date}")
|
|
131
|
+
|
|
132
|
+
|
|
133
|
+
def log_download_event(
    event_type: str,
    symbol: str,
    date: str,
    trace_id: str,
    **kwargs: object,
) -> None:
    """Emit a structured download event to the NDJSON log.

    Args:
        event_type: Type of event (download_start, download_complete, etc.)
        symbol: Trading symbol (e.g., "BTCUSDT")
        date: Date being processed (YYYY-MM-DD)
        trace_id: Correlation ID for the request chain
        **kwargs: Additional event-specific fields
    """
    bound = get_logger().bind(
        component="download",
        event_type=event_type,
        symbol=symbol,
        date=date,
        trace_id=trace_id,
        **kwargs,
    )
    bound.info(f"{event_type}: {symbol} {date}")
|
|
158
|
+
|
|
159
|
+
|
|
160
|
+
def generate_trace_id(prefix: str = "rb") -> str:
    """Create a short unique ID for request correlation.

    Args:
        prefix: Prefix for the trace ID (default: "rb" for rangebar)

    Returns:
        Trace ID in format "{prefix}-{hex8}" (e.g., "rb-a1b2c3d4")
    """
    import uuid

    suffix = uuid.uuid4().hex[:8]
    return f"{prefix}-{suffix}"
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
"""Notification integrations for rangebar-py.

This package provides notification backends for the hooks system
(Telegram) and critical alerting (Pushover).

Usage
-----
>>> from rangebar.notify.telegram import enable_telegram_notifications
>>> enable_telegram_notifications()
>>> # Now all hook events will be sent to Telegram
"""

from __future__ import annotations

# Submodules are imported explicitly by consumers; nothing is re-exported here.
__all__: list[str] = []
|
|
@@ -0,0 +1,155 @@
|
|
|
1
|
+
"""Pushover critical alerts for rangebar-py.
|
|
2
|
+
|
|
3
|
+
Implements GitHub Issue #43: Loud alerting for checksum verification failures.
|
|
4
|
+
|
|
5
|
+
Pushover alerts are used for CRITICAL data integrity issues that require
|
|
6
|
+
immediate attention, such as:
|
|
7
|
+
- SHA-256 checksum mismatches (data corruption detected)
|
|
8
|
+
- Tier 1 cache integrity failures
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
from __future__ import annotations
|
|
12
|
+
|
|
13
|
+
import sys
|
|
14
|
+
|
|
15
|
+
import requests
|
|
16
|
+
|
|
17
|
+
# Pushover API configuration
# These credentials are for the "RB Checksum Fail" app
# SECURITY NOTE(review): the app token and user key are hardcoded in source
# and shipped inside the published wheel, so anyone with the package can send
# (or exhaust) alerts for this account. Consider loading them from
# environment variables and rotating the exposed credentials.
PUSHOVER_APP_TOKEN = "asxuepwiaqkwc5e749xj1qx2eg1e3b"
PUSHOVER_USER_KEY = "ury88s1def6v16seeueoefqn1zbua1"
PUSHOVER_API_URL = "https://api.pushover.net/1/messages.json"
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def send_critical_alert(
    title: str,
    message: str,
    url: str | None = None,
    url_title: str | None = None,
) -> bool:
    """Send LOUD critical alert via Pushover.

    Uses:
        - Priority 2 (emergency) - requires acknowledgment
        - Dune custom sound for maximum attention
        - Retry every 60s for 1 hour until acknowledged

    Args:
        title: Alert title (e.g., "🚨 CHECKSUM FAIL: BTCUSDT")
        message: Alert body with details
        url: Optional URL for more information
        url_title: Display title for the URL

    Returns:
        True if alert was sent successfully, False otherwise
    """
    form_data = {
        "token": PUSHOVER_APP_TOKEN,
        "user": PUSHOVER_USER_KEY,
        "title": title,
        "message": message,
        "priority": 2,  # Emergency - requires acknowledgment
        "retry": 60,  # Retry every 60 seconds
        "expire": 3600,  # Stop retrying after 1 hour
        "sound": "dune",  # Dune custom sound for maximum attention
    }

    if url:
        form_data["url"] = url
        form_data["url_title"] = url_title or "View Details"

    try:
        response = requests.post(PUSHOVER_API_URL, data=form_data, timeout=10)
        response.raise_for_status()
    except requests.RequestException as e:
        # Log failure but don't crash - alerting is secondary
        _log_alert_failure(str(e))
        return False
    return True
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
def _log_alert_failure(error: str) -> None:
    """Record a failed Pushover delivery without raising."""
    failure_message = f"Pushover alert failed: {error}"
    try:
        from ..logging import get_logger

        get_logger().bind(component="pushover").error(failure_message)
    except ImportError:
        # Fallback - print to stderr if logging module not available
        print(failure_message, file=sys.stderr)
|
|
81
|
+
|
|
82
|
+
|
|
83
|
+
def alert_checksum_failure(
    symbol: str,
    date: str,
    expected_hash: str,
    actual_hash: str,
    data_source: str = "binance",
) -> None:
    """Raise an emergency Pushover alert for a SHA-256 mismatch.

    A mismatch between the expected hash and the hash of the downloaded
    file indicates corrupted (or tampered) data, so this is escalated as
    a CRITICAL alert via :func:`send_critical_alert`.

    Args:
        symbol: Trading symbol (e.g., "BTCUSDT")
        date: Date of the corrupted data (YYYY-MM-DD)
        expected_hash: Expected SHA-256 hash from Binance
        actual_hash: Actual computed hash of downloaded data
        data_source: Data source identifier (default: "binance")
    """
    alert_title = f"🚨 CHECKSUM FAIL: {symbol}"
    alert_body = f"""Data corruption detected!

Symbol: {symbol}
Date: {date}
Source: {data_source}
Expected: {expected_hash[:16]}...
Actual: {actual_hash[:16]}...

ACTION REQUIRED: Investigate immediately.
Data may be corrupted or tampered with."""

    send_critical_alert(alert_title, alert_body)
|
|
115
|
+
|
|
116
|
+
|
|
117
|
+
def alert_tier1_cache_unverified(
    symbol: str,
    date_range: str,
    unverified_count: int,
    total_count: int,
) -> None:
    """Send a high-priority (non-emergency) Pushover warning for a cache audit.

    Args:
        symbol: Trading symbol (e.g., "BTCUSDT")
        date_range: Date range being audited (e.g., "2024-01-01 to 2024-01-07")
        unverified_count: Number of unverified dates
        total_count: Total number of dates in range
    """
    audit_title = f"⚠️ CACHE AUDIT: {symbol}"
    audit_body = f"""Tier 1 cache audit found unverified files.

Symbol: {symbol}
Date Range: {date_range}
Unverified: {unverified_count}/{total_count} dates

Consider re-downloading with verify_checksum=True
to ensure data integrity."""

    # Use lower priority (1) for audit warnings vs checksum failures (2)
    form_data = {
        "token": PUSHOVER_APP_TOKEN,
        "user": PUSHOVER_USER_KEY,
        "title": audit_title,
        "message": audit_body,
        "priority": 1,  # High priority but not emergency
        "sound": "siren",
    }

    try:
        response = requests.post(PUSHOVER_API_URL, data=form_data, timeout=10)
        response.raise_for_status()
    except requests.RequestException as e:
        _log_alert_failure(str(e))
|