rangebar 11.6.1__cp313-cp313-macosx_11_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- rangebar/CLAUDE.md +327 -0
- rangebar/__init__.py +227 -0
- rangebar/__init__.pyi +1089 -0
- rangebar/_core.cpython-313-darwin.so +0 -0
- rangebar/checkpoint.py +472 -0
- rangebar/cli.py +298 -0
- rangebar/clickhouse/CLAUDE.md +139 -0
- rangebar/clickhouse/__init__.py +100 -0
- rangebar/clickhouse/bulk_operations.py +309 -0
- rangebar/clickhouse/cache.py +734 -0
- rangebar/clickhouse/client.py +121 -0
- rangebar/clickhouse/config.py +141 -0
- rangebar/clickhouse/mixin.py +120 -0
- rangebar/clickhouse/preflight.py +504 -0
- rangebar/clickhouse/query_operations.py +345 -0
- rangebar/clickhouse/schema.sql +187 -0
- rangebar/clickhouse/tunnel.py +222 -0
- rangebar/constants.py +288 -0
- rangebar/conversion.py +177 -0
- rangebar/exceptions.py +207 -0
- rangebar/exness.py +364 -0
- rangebar/hooks.py +311 -0
- rangebar/logging.py +171 -0
- rangebar/notify/__init__.py +15 -0
- rangebar/notify/pushover.py +155 -0
- rangebar/notify/telegram.py +271 -0
- rangebar/orchestration/__init__.py +20 -0
- rangebar/orchestration/count_bounded.py +797 -0
- rangebar/orchestration/helpers.py +412 -0
- rangebar/orchestration/models.py +76 -0
- rangebar/orchestration/precompute.py +498 -0
- rangebar/orchestration/range_bars.py +736 -0
- rangebar/orchestration/tick_fetcher.py +226 -0
- rangebar/ouroboros.py +454 -0
- rangebar/processors/__init__.py +22 -0
- rangebar/processors/api.py +383 -0
- rangebar/processors/core.py +522 -0
- rangebar/resource_guard.py +567 -0
- rangebar/storage/__init__.py +22 -0
- rangebar/storage/checksum_registry.py +218 -0
- rangebar/storage/parquet.py +728 -0
- rangebar/streaming.py +300 -0
- rangebar/validation/__init__.py +69 -0
- rangebar/validation/cache_staleness.py +277 -0
- rangebar/validation/continuity.py +664 -0
- rangebar/validation/gap_classification.py +294 -0
- rangebar/validation/post_storage.py +317 -0
- rangebar/validation/tier1.py +175 -0
- rangebar/validation/tier2.py +261 -0
- rangebar-11.6.1.dist-info/METADATA +308 -0
- rangebar-11.6.1.dist-info/RECORD +54 -0
- rangebar-11.6.1.dist-info/WHEEL +4 -0
- rangebar-11.6.1.dist-info/entry_points.txt +2 -0
- rangebar-11.6.1.dist-info/licenses/LICENSE +21 -0
rangebar/exceptions.py
ADDED
|
@@ -0,0 +1,207 @@
|
|
|
1
|
+
"""Exception hierarchy for rangebar-py.
|
|
2
|
+
|
|
3
|
+
This module defines a structured exception hierarchy for consistent error
|
|
4
|
+
handling across the rangebar library. All rangebar-specific exceptions
|
|
5
|
+
inherit from RangeBarError.
|
|
6
|
+
|
|
7
|
+
Exception Hierarchy
|
|
8
|
+
-------------------
|
|
9
|
+
RangeBarError (base)
|
|
10
|
+
├── CacheError (base for cache operations)
|
|
11
|
+
│ ├── CacheConnectionError (connection failures)
|
|
12
|
+
│ ├── CacheReadError (read operation failures)
|
|
13
|
+
│ ├── CacheWriteError (write operation failures)
|
|
14
|
+
│ └── CacheSchemaError (schema mismatches)
|
|
15
|
+
├── ValidationError (data validation failures)
|
|
16
|
+
└── ProcessingError (range bar computation failures)
|
|
17
|
+
|
|
18
|
+
Usage
|
|
19
|
+
-----
|
|
20
|
+
>>> from rangebar.exceptions import CacheReadError, CacheWriteError
|
|
21
|
+
>>>
|
|
22
|
+
>>> try:
|
|
23
|
+
... bars = cache.get_range_bars(key)
|
|
24
|
+
... except CacheReadError as e:
|
|
25
|
+
... logger.error(f"Cache read failed: {e}")
|
|
26
|
+
... # Fall back to computation
|
|
27
|
+
"""
|
|
28
|
+
|
|
29
|
+
from __future__ import annotations
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
class RangeBarError(Exception):
    """Root of the rangebar-py exception hierarchy.

    Every exception raised by the rangebar library derives from this
    class, so a single ``except RangeBarError`` handler is enough to
    catch any library-specific failure.

    Examples
    --------
    >>> try:
    ...     df = get_range_bars("BTCUSDT", "2024-01-01", "2024-01-31")
    ... except RangeBarError as e:
    ...     print(f"Rangebar error: {e}")
    """
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
class CacheError(RangeBarError):
    """Common base for failures in ClickHouse cache operations.

    Every failing cache operation raises a subclass of this exception,
    so catching ``CacheError`` covers all cache-related errors.

    Attributes
    ----------
    symbol : str | None
        Trading symbol involved in the operation, if applicable.
    operation : str | None
        The cache operation that failed (e.g., "read", "write").
    """

    def __init__(
        self,
        message: str,
        *,
        symbol: str | None = None,
        operation: str | None = None,
    ) -> None:
        # Record the operational context first, then hand the message
        # to the standard Exception machinery.
        self.symbol = symbol
        self.operation = operation
        super().__init__(message)
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
class CacheConnectionError(CacheError):
    """Signals that a ClickHouse connection could not be established.

    Typical causes:
    - ClickHouse server is unreachable
    - SSH tunnel cannot be established
    - Authentication fails

    Examples
    --------
    >>> try:
    ...     cache = RangeBarCache()
    ... except CacheConnectionError as e:
    ...     print(f"Cannot connect to ClickHouse: {e}")
    """
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
class CacheReadError(CacheError):
    """Signals that a cache read failed after a successful connection.

    Typical causes:
    - Query syntax errors
    - Schema mismatches
    - Query timeout

    Examples
    --------
    >>> try:
    ...     bars = cache.get_range_bars(key)
    ... except CacheReadError as e:
    ...     logger.warning(f"Cache read failed: {e}")
    ...     # Fall back to computation
    """
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
class CacheWriteError(CacheError):
    """Signals that writing results to the cache failed.

    Typical causes:
    - Disk space exhaustion
    - Network interruption during write
    - Schema mismatch on insert

    A write failure is usually non-fatal: the computed result remains
    valid, it simply will not be cached for the next call.

    Examples
    --------
    >>> try:
    ...     cache.store_range_bars(key, bars)
    ... except CacheWriteError as e:
    ...     logger.warning(f"Cache write failed (non-fatal): {e}")
    ...     # Continue - computation succeeded
    """
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
class CacheSchemaError(CacheError):
    """Signals a schema mismatch between the code and the database.

    The ClickHouse table layout does not match what the code expects.
    Common scenarios:
    - New columns added to code but not migrated in DB
    - Database migrated but code not updated
    - Different rangebar versions writing to same cache

    Resolution: run a schema migration or clear the cache for the
    affected symbol.

    Examples
    --------
    >>> try:
    ...     cache.store_range_bars(key, bars)
    ... except CacheSchemaError as e:
    ...     print(f"Schema mismatch: {e}")
    ...     print("Run: ALTER TABLE rangebar_cache.range_bars ADD COLUMN ...")
    """
|
|
150
|
+
|
|
151
|
+
|
|
152
|
+
class ValidationError(RangeBarError):
    """Signals that data failed a validation check.

    Raised in situations such as:
    - Post-storage validation detects data corruption
    - Microstructure feature values are out of expected bounds
    - Input data fails sanity checks

    Attributes
    ----------
    validation_tier : int | None
        The validation tier that failed (1 or 2).
    failed_checks : dict | None
        Dictionary of failed validation checks.
    """

    def __init__(
        self,
        message: str,
        *,
        validation_tier: int | None = None,
        failed_checks: dict | None = None,
    ) -> None:
        # Stash the validation context, then let Exception keep the message.
        self.failed_checks = failed_checks
        self.validation_tier = validation_tier
        super().__init__(message)
|
|
178
|
+
|
|
179
|
+
|
|
180
|
+
class ProcessingError(RangeBarError):
    """Signals a failure while computing range bars from trades.

    Raised when the Rust backend cannot convert trades into range bars.
    Typical causes:
    - Invalid trade data (missing fields, wrong types)
    - Trades not sorted chronologically
    - Internal processing error

    Examples
    --------
    >>> try:
    ...     bars = processor.process_trades(trades)
    ... except ProcessingError as e:
    ...     print(f"Processing failed: {e}")
    """
|
|
196
|
+
|
|
197
|
+
|
|
198
|
+
# Public API: the complete exception hierarchy, listed alphabetically.
__all__ = [
    "CacheConnectionError",
    "CacheError",
    "CacheReadError",
    "CacheSchemaError",
    "CacheWriteError",
    "ProcessingError",
    "RangeBarError",
    "ValidationError",
]
|
rangebar/exness.py
ADDED
|
@@ -0,0 +1,364 @@
|
|
|
1
|
+
# polars-exception: backtesting.py requires Pandas DataFrames for OHLCV data
|
|
2
|
+
"""Exness forex data provider integration.
|
|
3
|
+
|
|
4
|
+
This module provides Python bindings for processing Exness Raw_Spread tick data
|
|
5
|
+
into range bars. It is only available when the 'exness' feature is enabled.
|
|
6
|
+
|
|
7
|
+
Usage:
|
|
8
|
+
from rangebar.exness import (
|
|
9
|
+
ExnessInstrument,
|
|
10
|
+
ExnessRangeBarBuilder,
|
|
11
|
+
ValidationStrictness,
|
|
12
|
+
process_exness_ticks_to_dataframe,
|
|
13
|
+
)
|
|
14
|
+
|
|
15
|
+
# Create builder for EURUSD with 25bps threshold
|
|
16
|
+
builder = ExnessRangeBarBuilder(
|
|
17
|
+
ExnessInstrument.EURUSD,
|
|
18
|
+
threshold_decimal_bps=250,
|
|
19
|
+
strictness=ValidationStrictness.Strict,
|
|
20
|
+
)
|
|
21
|
+
|
|
22
|
+
# Process tick data
|
|
23
|
+
for tick in ticks:
|
|
24
|
+
bar = builder.process_tick(tick)
|
|
25
|
+
if bar:
|
|
26
|
+
print(f"Bar closed: {bar}")
|
|
27
|
+
|
|
28
|
+
Note:
|
|
29
|
+
- Volume is always 0 (Exness Raw_Spread has no volume data)
|
|
30
|
+
- SpreadStats capture market stress signals via spread dynamics
|
|
31
|
+
- JPY pairs (USDJPY, EURJPY, GBPJPY) use different pip values
|
|
32
|
+
"""
|
|
33
|
+
|
|
34
|
+
from __future__ import annotations
|
|
35
|
+
|
|
36
|
+
from typing import TYPE_CHECKING
|
|
37
|
+
|
|
38
|
+
import pandas as pd
|
|
39
|
+
|
|
40
|
+
if TYPE_CHECKING:
|
|
41
|
+
from collections.abc import Sequence
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
def _check_exness_available() -> None:
|
|
45
|
+
"""Check if Exness feature is available."""
|
|
46
|
+
try:
|
|
47
|
+
from rangebar._core import ExnessInstrument as _ # noqa: F401
|
|
48
|
+
except ImportError as e:
|
|
49
|
+
msg = (
|
|
50
|
+
"Exness support not available. "
|
|
51
|
+
"Install with: pip install rangebar[exness] "
|
|
52
|
+
"or build with: maturin develop --features exness"
|
|
53
|
+
)
|
|
54
|
+
raise ImportError(msg) from e
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
# The Exness types live in the Rust extension and are only present when
# the crate was built with the `exness` feature enabled.
try:
    from rangebar._core import (
        ExnessInstrument,
        ExnessRangeBarBuilder,
        ValidationStrictness,
    )
except ImportError:
    # Bindings missing: expose None placeholders so annotations and
    # attribute access still resolve at import time.
    ExnessInstrument = None  # type: ignore[misc,assignment]
    ExnessRangeBarBuilder = None  # type: ignore[misc,assignment]
    ValidationStrictness = None  # type: ignore[misc,assignment]
    _EXNESS_AVAILABLE = False
else:
    _EXNESS_AVAILABLE = True
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
def process_exness_ticks_to_dataframe(
    ticks: pd.DataFrame | Sequence[dict[str, float | int]],
    instrument: ExnessInstrument,  # type: ignore[valid-type]
    threshold_decimal_bps: int = 250,
    strictness: ValidationStrictness = None,  # type: ignore[assignment]
) -> pd.DataFrame:
    """Convert Exness bid/ask tick data into a range-bar OHLCV DataFrame.

    Parameters
    ----------
    ticks : pd.DataFrame or Sequence[Dict]
        Tick data with columns/keys: bid, ask, timestamp_ms
    instrument : ExnessInstrument
        Exness instrument enum value (e.g., ExnessInstrument.EURUSD)
    threshold_decimal_bps : int, default=250
        Threshold in decimal basis points (250 = 25bps = 0.25%)
    strictness : ValidationStrictness, optional
        Validation strictness level (default: Strict)

    Returns
    -------
    pd.DataFrame
        OHLCV DataFrame with DatetimeIndex, compatible with backtesting.py.
        Additional column: spread_stats (dict with min/max/avg spread)

    Raises
    ------
    ImportError
        If Exness feature is not enabled
    ValueError
        If required columns are missing or threshold is invalid
    RuntimeError
        If tick validation fails (crossed market, excessive spread)

    Examples
    --------
    >>> from rangebar.exness import (
    ...     ExnessInstrument,
    ...     process_exness_ticks_to_dataframe,
    ... )
    >>> import pandas as pd
    >>> ticks = pd.DataFrame({
    ...     "bid": [1.0800, 1.0810, 1.0830],
    ...     "ask": [1.0805, 1.0815, 1.0835],
    ...     "timestamp_ms": [1600000000000, 1600001000000, 1600002000000],
    ... })
    >>> bars = process_exness_ticks_to_dataframe(
    ...     ticks, ExnessInstrument.EURUSD, threshold_decimal_bps=250
    ... )
    """
    _check_exness_available()

    # Default to the strict validation profile when none is given.
    if strictness is None:
        strictness = ValidationStrictness.Strict  # type: ignore[attr-defined]

    builder = ExnessRangeBarBuilder(instrument, threshold_decimal_bps, strictness)  # type: ignore[misc]

    # Normalize the input into a list of {bid, ask, timestamp_ms} records.
    if isinstance(ticks, pd.DataFrame):
        missing = {"bid", "ask", "timestamp_ms"} - set(ticks.columns)
        if missing:
            msg = f"Missing required columns: {missing}"
            raise ValueError(msg)
        tick_dicts = ticks[["bid", "ask", "timestamp_ms"]].to_dict("records")
    else:
        tick_dicts = list(ticks)

    bars = builder.process_ticks(tick_dicts)

    # The trailing partial bar, if any, is appended alongside the closed bars.
    incomplete = builder.get_incomplete_bar()
    if incomplete:
        bars.append(incomplete)

    if not bars:
        # Keep the backtesting.py-compatible structure even when empty.
        empty = pd.DataFrame(
            columns=["Open", "High", "Low", "Close", "Volume", "spread_stats"]
        )
        return empty.set_index(pd.DatetimeIndex([], name="timestamp"))

    frame = pd.DataFrame(bars)

    # Promote parsed timestamps to the index, then rename OHLCV columns to
    # the capitalized names backtesting.py expects.
    frame["timestamp"] = pd.to_datetime(frame["timestamp"])
    frame = frame.set_index("timestamp").rename(
        columns={
            "open": "Open",
            "high": "High",
            "low": "Low",
            "close": "Close",
            "volume": "Volume",
        }
    )

    # Return columns in the standard order.
    return frame[["Open", "High", "Low", "Close", "Volume", "spread_stats"]]
|
|
180
|
+
|
|
181
|
+
|
|
182
|
+
def is_exness_available() -> bool:
    """Report whether the Exness Rust bindings were importable.

    Returns
    -------
    bool
        True if Exness bindings are available, False otherwise
    """
    return _EXNESS_AVAILABLE
|
|
191
|
+
|
|
192
|
+
|
|
193
|
+
def get_range_bars_exness(
    instrument: str,
    start_date: str,
    end_date: str,
    _threshold_decimal_bps: int = 250,  # Reserved for implementation
    *,
    _include_orphaned_bars: bool = False,  # Reserved for implementation
    _include_incomplete: bool = False,  # Reserved for implementation
    strictness: ValidationStrictness = None,  # type: ignore[assignment]
) -> pd.DataFrame:
    """Get range bars for Forex instruments with dynamic ouroboros.

    This is the Forex-specific API that uses **dynamic ouroboros** - the first
    tick after each weekend gap becomes the ouroboros point. This handles DST
    automatically since we use actual data gaps, not calendar calculations.

    For Forex, ouroboros is implicitly "week" aligned with market structure:
    - Markets close Friday ~21:00 UTC
    - Markets reopen Sunday ~17:00 UTC (shifts with DST)
    - First tick after weekend = ouroboros point

    .. warning::
        Tick fetching is not yet integrated: after validating its arguments
        this function currently always raises NotImplementedError. Use
        process_exness_ticks_to_dataframe() with pre-loaded tick data instead.

    Parameters
    ----------
    instrument : str
        Forex instrument (e.g., "EURUSD", "GBPUSD", "XAUUSD").
        Must match ExnessInstrument enum values.
    start_date : str
        Start date in YYYY-MM-DD format.
    end_date : str
        End date in YYYY-MM-DD format.
    _threshold_decimal_bps : int, default=250
        Threshold in decimal basis points (250 dbps = 0.25%).
        Reserved for the future implementation; currently unused.
    _include_orphaned_bars : bool, default=False
        Include incomplete bars from weekend boundaries.
        If True, orphaned bars are included with ``is_orphan=True`` column.
        Reserved for the future implementation; currently unused.
    _include_incomplete : bool, default=False
        Include the final incomplete bar (if any).
        Reserved for the future implementation; currently unused.
    strictness : ValidationStrictness, optional
        Tick validation strictness level (default: Strict).
        - Permissive: Accept all ticks
        - Strict: Reject crossed markets (bid > ask)
        - Paranoid: Reject excessive spreads

    Returns
    -------
    pd.DataFrame
        OHLCV DataFrame with DatetimeIndex, compatible with backtesting.py.
        Additional columns:
        - spread_stats: dict with min/max/avg spread
        - is_orphan: bool (if _include_orphaned_bars=True)
        - ouroboros_boundary: datetime (if _include_orphaned_bars=True)

    Raises
    ------
    ImportError
        If Exness feature is not enabled.
    ValueError
        If instrument is invalid or dates are malformed.
    NotImplementedError
        Always raised (after argument validation) until tick fetching
        is integrated.
    FileNotFoundError
        (Future) If tick data is not available for the date range.

    Notes
    -----
    - Volume is always 0 (Exness Raw_Spread has no volume data)
    - Ouroboros is always "week" for Forex (dynamic, based on actual gaps)
    - Weekend boundaries are detected from gaps > 40 hours in tick data

    Examples
    --------
    >>> from rangebar.exness import get_range_bars_exness
    >>> df = get_range_bars_exness("EURUSD", "2024-01-15", "2024-01-19")
    Traceback (most recent call last):
        ...
    NotImplementedError: ...

    See Also
    --------
    rangebar.get_range_bars : Generic API for all data sources
    process_exness_ticks_to_dataframe : Low-level tick processing
    """
    _check_exness_available()

    from datetime import datetime as dt

    # These imports are used in the implementation below (after NotImplementedError)
    from .ouroboros import (  # noqa: F401
        detect_forex_weekend_boundaries,
        iter_forex_ouroboros_segments,
    )

    # Set default strictness
    if strictness is None:
        strictness = ValidationStrictness.Strict  # type: ignore[attr-defined]

    # Parse instrument (validation happens before NotImplementedError)
    try:
        exness_instrument = ExnessInstrument[instrument.upper()]  # type: ignore[index]
        _ = exness_instrument  # Used in implementation below
    except (KeyError, TypeError) as e:
        valid = [i.name for i in ExnessInstrument] if ExnessInstrument else []  # type: ignore[attr-defined]
        msg = f"Invalid instrument: {instrument!r}. Valid: {valid}"
        raise ValueError(msg) from e

    # Parse dates (validation happens before NotImplementedError)
    # Note: Timezone not needed - we extract .date() which is timezone-naive by design
    try:
        start_dt = dt.strptime(start_date, "%Y-%m-%d").date()  # noqa: DTZ007
        end_dt = dt.strptime(end_date, "%Y-%m-%d").date()  # noqa: DTZ007
        _ = (start_dt, end_dt)  # Used in implementation below
    except ValueError as e:
        msg = f"Invalid date format. Use YYYY-MM-DD. Error: {e}"
        raise ValueError(msg) from e

    # TODO: Fetch tick data from Exness API or local cache
    # For now, this requires pre-downloaded tick data
    # Future: Integrate with ExnessFetcher from Rust
    msg = (
        "get_range_bars_exness() requires tick data to be pre-loaded. "
        "Use process_exness_ticks_to_dataframe() with your tick data, "
        "or wait for ExnessFetcher integration (coming soon)."
    )
    raise NotImplementedError(msg)

    # The implementation below shows the intended flow once tick fetching is available:
    #
    # # Fetch ticks for date range
    # ticks = fetch_exness_ticks(instrument, start_dt, end_dt)
    #
    # # Extract timestamps for weekend detection
    # timestamps_ms = [t["timestamp_ms"] for t in ticks]
    #
    # # Detect weekend boundaries (dynamic ouroboros)
    # boundaries = detect_forex_weekend_boundaries(timestamps_ms)
    #
    # # Create builder
    # builder = ExnessRangeBarBuilder(
    #     exness_instrument, threshold_decimal_bps, strictness
    # )
    #
    # all_bars = []
    # for start_idx, end_idx, boundary in iter_forex_ouroboros_segments(
    #     timestamps_ms, start_dt, end_dt
    # ):
    #     # Reset at weekend boundary
    #     if boundary is not None:
    #         orphaned = builder.reset()  # Need to add reset method
    #         if include_orphaned_bars and orphaned:
    #             orphaned["is_orphan"] = True
    #             orphaned["ouroboros_boundary"] = boundary.timestamp
    #             all_bars.append(orphaned)
    #
    #     # Process segment ticks
    #     segment_ticks = ticks[start_idx:end_idx + 1]
    #     bars = builder.process_ticks(segment_ticks)
    #     all_bars.extend(bars)
    #
    # # Handle incomplete bar
    # if include_incomplete:
    #     incomplete = builder.get_incomplete_bar()
    #     if incomplete:
    #         all_bars.append(incomplete)
    #
    # return pd.DataFrame(all_bars)
|
|
355
|
+
|
|
356
|
+
|
|
357
|
+
# Explicit public API for `from rangebar.exness import ...`; the three
# binding names resolve to None placeholders when the feature is missing.
__all__ = [
    "ExnessInstrument",
    "ExnessRangeBarBuilder",
    "ValidationStrictness",
    "get_range_bars_exness",
    "is_exness_available",
    "process_exness_ticks_to_dataframe",
]
|