rangebar-11.6.1-cp313-cp313-macosx_11_0_arm64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54) hide show
  1. rangebar/CLAUDE.md +327 -0
  2. rangebar/__init__.py +227 -0
  3. rangebar/__init__.pyi +1089 -0
  4. rangebar/_core.cpython-313-darwin.so +0 -0
  5. rangebar/checkpoint.py +472 -0
  6. rangebar/cli.py +298 -0
  7. rangebar/clickhouse/CLAUDE.md +139 -0
  8. rangebar/clickhouse/__init__.py +100 -0
  9. rangebar/clickhouse/bulk_operations.py +309 -0
  10. rangebar/clickhouse/cache.py +734 -0
  11. rangebar/clickhouse/client.py +121 -0
  12. rangebar/clickhouse/config.py +141 -0
  13. rangebar/clickhouse/mixin.py +120 -0
  14. rangebar/clickhouse/preflight.py +504 -0
  15. rangebar/clickhouse/query_operations.py +345 -0
  16. rangebar/clickhouse/schema.sql +187 -0
  17. rangebar/clickhouse/tunnel.py +222 -0
  18. rangebar/constants.py +288 -0
  19. rangebar/conversion.py +177 -0
  20. rangebar/exceptions.py +207 -0
  21. rangebar/exness.py +364 -0
  22. rangebar/hooks.py +311 -0
  23. rangebar/logging.py +171 -0
  24. rangebar/notify/__init__.py +15 -0
  25. rangebar/notify/pushover.py +155 -0
  26. rangebar/notify/telegram.py +271 -0
  27. rangebar/orchestration/__init__.py +20 -0
  28. rangebar/orchestration/count_bounded.py +797 -0
  29. rangebar/orchestration/helpers.py +412 -0
  30. rangebar/orchestration/models.py +76 -0
  31. rangebar/orchestration/precompute.py +498 -0
  32. rangebar/orchestration/range_bars.py +736 -0
  33. rangebar/orchestration/tick_fetcher.py +226 -0
  34. rangebar/ouroboros.py +454 -0
  35. rangebar/processors/__init__.py +22 -0
  36. rangebar/processors/api.py +383 -0
  37. rangebar/processors/core.py +522 -0
  38. rangebar/resource_guard.py +567 -0
  39. rangebar/storage/__init__.py +22 -0
  40. rangebar/storage/checksum_registry.py +218 -0
  41. rangebar/storage/parquet.py +728 -0
  42. rangebar/streaming.py +300 -0
  43. rangebar/validation/__init__.py +69 -0
  44. rangebar/validation/cache_staleness.py +277 -0
  45. rangebar/validation/continuity.py +664 -0
  46. rangebar/validation/gap_classification.py +294 -0
  47. rangebar/validation/post_storage.py +317 -0
  48. rangebar/validation/tier1.py +175 -0
  49. rangebar/validation/tier2.py +261 -0
  50. rangebar-11.6.1.dist-info/METADATA +308 -0
  51. rangebar-11.6.1.dist-info/RECORD +54 -0
  52. rangebar-11.6.1.dist-info/WHEEL +4 -0
  53. rangebar-11.6.1.dist-info/entry_points.txt +2 -0
  54. rangebar-11.6.1.dist-info/licenses/LICENSE +21 -0
rangebar/CLAUDE.md ADDED
@@ -0,0 +1,327 @@
1
+ # Python Layer
2
+
3
+ **Parent**: [/CLAUDE.md](/CLAUDE.md) | **API Reference**: [/docs/api.md](/docs/api.md)
4
+
5
+ This directory contains the Python API layer for rangebar-py.
6
+
7
+ ---
8
+
9
+ ## AI Agent Quick Reference
10
+
11
+ ### Common Tasks & Entry Points
12
+
13
+ | When Claude is asked to... | Primary File | Function/Class |
14
+ |---------------------------|--------------|----------------|
15
+ | Generate range bars (date-bounded) | `__init__.py` | `get_range_bars()` |
16
+ | Generate range bars (count-bounded, ML) | `__init__.py` | `get_n_range_bars()` |
17
+ | Generate range bars (existing data) | `__init__.py` | `process_trades_to_dataframe()` |
18
+ | Generate range bars (Polars) | `__init__.py` | `process_trades_polars()` |
19
+ | Process large datasets | `__init__.py` | `process_trades_chunked()` |
20
+ | Read/write tick data | `storage/parquet.py` | `TickStorage` class |
21
+ | Bar-count cache operations | `clickhouse/cache.py` | `count_bars()`, `get_n_bars()` |
22
+ | Validate microstructure features | `validation/tier1.py` | `validate_tier1()` |
23
+
24
+ ### API Selection Guide
25
+
26
+ ```
27
+ Starting Point?
28
+ ├── Need data fetching (date range)? → get_range_bars() [DATE-BOUNDED]
29
+ ├── Need exactly N bars (ML/walk-forward)? → get_n_range_bars() [COUNT-BOUNDED]
30
+ ├── Have pandas DataFrame → process_trades_to_dataframe()
31
+ ├── Have Polars DataFrame/LazyFrame → process_trades_polars() [2-3x faster]
32
+ └── Have Iterator (large data) → process_trades_chunked()
33
+ ```
34
+
35
+ ### File-to-Responsibility Mapping
36
+
37
+ | File | Responsibility |
38
+ |------|----------------|
39
+ | `__init__.py` | Public Python API |
40
+ | `__init__.pyi` | Type stubs for IDE/AI |
41
+ | `storage/parquet.py` | Tier 1 cache (local Parquet) |
42
+ | `clickhouse/cache.py` | Tier 2 cache (ClickHouse) |
43
+ | `clickhouse/schema.sql` | ClickHouse table schema |
44
+ | `validation/tier1.py` | Fast validation (<30 sec) |
45
+ | `validation/tier2.py` | Statistical validation (~10 min) |
46
+ | `exness.py` | Exness data source utilities |
47
+
48
+ ### Performance Optimization Checklist
49
+
50
+ When optimizing data processing:
51
+ 1. Use `pl.scan_parquet()` instead of `pl.read_parquet()` (lazy loading)
52
+ 2. Apply filters on LazyFrame before `.collect()` (predicate pushdown)
53
+ 3. Select only required columns before `.to_dicts()` (minimal conversion)
54
+ 4. Use `process_trades_chunked()` for datasets >10M trades
55
+
56
+ ---
57
+
58
+ ## Structure
59
+
60
+ ```
61
+ python/rangebar/
62
+ ├── __init__.py # Public API (get_range_bars, process_trades_*)
63
+ ├── __init__.pyi # Type stubs for IDE/AI
64
+ ├── _core.cpython-313-darwin.so   # PyO3 binary extension (built by maturin; filename tag varies by build — abi3 builds produce _core.abi3.so)
65
+ ├── clickhouse/ # Tier 2 cache (ClickHouse Cloud)
66
+ │ ├── cache.py # Range bar cache operations
67
+ │ └── schema.sql # Table schema (v7.0: 10 microstructure columns)
68
+ ├── storage/ # Tier 1 cache (local Parquet)
69
+ │ └── parquet.py # TickStorage class
70
+ ├── validation/ # Microstructure feature validation (v7.0+)
71
+ │ ├── tier1.py # Fast validation (<30 sec)
72
+ │ └── tier2.py # Statistical validation (~10 min)
73
+ └── exness.py # Exness data source utilities
74
+ ```
75
+
76
+ ---
77
+
78
+ ## Key Files
79
+
80
+ ### `__init__.py` - Public API
81
+
82
+ **Entry points**:
83
+
84
+ | Function | Purpose |
85
+ |----------|---------|
86
+ | `get_range_bars()` | Date-bounded, auto-fetch, caching |
87
+ | `get_n_range_bars()` | Count-bounded, ML training |
88
+ | `process_trades_to_dataframe()` | From existing pandas DataFrame |
89
+ | `process_trades_polars()` | From Polars, 2-3x faster |
90
+ | `process_trades_chunked()` | Streaming, memory-safe |
91
+ | `precompute_range_bars()` | Batch precompute to ClickHouse |
92
+
93
+ **Constants**:
94
+ - `TIER1_SYMBOLS` - 18 high-liquidity symbols
95
+ - `THRESHOLD_PRESETS` - Named thresholds (micro, tight, standard, etc.)
96
+
97
+ ### `__init__.pyi` - Type Stubs
98
+
99
+ Provides type hints for IDE autocompletion and AI assistants. Keep in sync with `__init__.py`.
100
+
101
+ ### `_core.abi3.so` - PyO3 Extension
102
+
103
+ Binary built by `maturin develop`. Contains:
104
+ - `PyRangeBarProcessor` - Wraps Rust processor
105
+ - `PyAggTrade` - Trade data type
106
+ - Data fetching (when providers feature enabled)
107
+
108
+ **Rebuild after Rust changes**: `maturin develop`
109
+
110
+ ---
111
+
112
+ ## Caching Architecture
113
+
114
+ ### Tier 1: Local Parquet
115
+
116
+ **Location**: `storage/parquet.py`
117
+
118
+ - Stores raw tick data locally
119
+ - Used by `get_range_bars()` with `use_cache=True`
120
+ - Fast retrieval, no network dependency
121
+
122
+ ```python
123
+ from rangebar.storage.parquet import TickStorage
124
+ storage = TickStorage()
125
+ ticks = storage.get_ticks("BTCUSDT", start, end)
126
+ ```
127
+
128
+ ### Tier 2: ClickHouse
129
+
130
+ **Location**: `clickhouse/cache.py`
131
+
132
+ - Stores precomputed range bars
133
+ - Used by `get_range_bars()` and `get_n_range_bars()`
134
+ - Requires ClickHouse Cloud connection
135
+
136
+ ```python
137
+ from rangebar.clickhouse.cache import get_cached_bars, count_bars
138
+ bars_df = get_cached_bars("BTCUSDT", start, end, threshold=250)
139
+ n = count_bars("BTCUSDT", threshold=250)
140
+ ```
141
+
142
+ **Schema**: `clickhouse/schema.sql` (v7.0 adds 10 microstructure columns)
143
+
144
+ ---
145
+
146
+ ## Validation Framework (v7.0+)
147
+
148
+ **Location**: `validation/`
149
+
150
+ Validates microstructure features for ML readiness.
151
+
152
+ ### Tier 1: Smoke Test (`tier1.py`)
153
+
154
+ - Runtime: <30 seconds
155
+ - Runs automatically on every `precompute_range_bars()`
156
+ - Checks: NaN, Inf, bounds, basic correlations
157
+
158
+ ```python
159
+ from rangebar.validation.tier1 import validate_tier1
160
+ result = validate_tier1(df)
161
+ assert result["tier1_passed"]
162
+ ```
163
+
164
+ ### Tier 2: Statistical (`tier2.py`)
165
+
166
+ - Runtime: ~10 minutes
167
+ - **Mandatory before production ML training**
168
+ - Checks: Stationarity, predictive power, mutual information
169
+
170
+ ```python
171
+ from rangebar.validation.tier2 import validate_tier2
172
+ df["forward_return"] = df["Close"].shift(-1) / df["Close"] - 1
173
+ result = validate_tier2(df)
174
+ assert result["tier2_passed"]
175
+ ```
176
+
177
+ ---
178
+
179
+ ## Common Patterns
180
+
181
+ ### Date-Bounded Bars (Backtesting)
182
+
183
+ ```python
184
+ from rangebar import get_range_bars
185
+
186
+ df = get_range_bars(
187
+ "BTCUSDT",
188
+ "2024-01-01",
189
+ "2024-06-30",
190
+ threshold_decimal_bps=250, # 0.25%
191
+ )
192
+ ```
193
+
194
+ ### Count-Bounded Bars (ML)
195
+
196
+ ```python
197
+ from rangebar import get_n_range_bars
198
+
199
+ df = get_n_range_bars(
200
+ "BTCUSDT",
201
+ n_bars=10000,
202
+ threshold_decimal_bps=250,
203
+ )
204
+ assert len(df) == 10000 # Exact count guaranteed
205
+ ```
206
+
207
+ ### With Microstructure Features (v7.0+)
208
+
209
+ ```python
210
+ df = get_range_bars(
211
+ "BTCUSDT",
212
+ "2024-01-01",
213
+ "2024-06-30",
214
+ include_microstructure=True,
215
+ )
216
+ # Includes: ofi, vwap_close_deviation, kyle_lambda_proxy, etc.
217
+ ```
218
+
219
+ ### Large Dataset Processing
220
+
221
+ ```python
222
+ from rangebar import process_trades_chunked
223
+
224
+ for bars_chunk in process_trades_chunked(trades_iterator, chunk_size=100_000):
225
+ # Process incrementally, bounded memory
226
+ save_to_database(bars_chunk)
227
+ ```
228
+
229
+ ### backtesting.py Integration
230
+
231
+ ```python
232
+ from backtesting import Backtest, Strategy
233
+ from rangebar import get_range_bars
234
+
235
+ # Fetch data and generate range bars in one call
236
+ data = get_range_bars("BTCUSDT", "2024-01-01", "2024-06-30")
237
+
238
+ # Use directly with backtesting.py
239
+ bt = Backtest(data, MyStrategy, cash=10000, commission=0.0002)
240
+ stats = bt.run()
241
+ bt.plot()
242
+ ```
243
+
244
+ **Output Format** (backtesting.py compatible):
245
+ ```python
246
+ # DataFrame with DatetimeIndex and OHLCV columns
247
+ Open High Low Close Volume
248
+ timestamp
249
+ 2024-01-01 00:00:15 42000.00 42105.00 41980.00 42100.00 15.43
250
+ 2024-01-01 00:03:42 42100.00 42220.00 42100.00 42215.00 8.72
251
+ ```
252
+
253
+ ### backtesting.py Compatibility Checklist
254
+
255
+ - [x] **OHLCV column names**: Capitalized (Open, High, Low, Close, Volume)
256
+ - [x] **DatetimeIndex**: Pandas DatetimeIndex with timezone-naive timestamps
257
+ - [x] **No NaN values**: All bars complete (backtesting.py raises on NaN)
258
+ - [x] **Sorted chronologically**: Timestamps in ascending order
259
+ - [x] **OHLC invariants**: High ≥ max(Open, Close), Low ≤ min(Open, Close)
260
+
261
+ **Validation Script**:
262
+ ```python
263
+ def validate_for_backtesting_py(df: pd.DataFrame) -> bool:
264
+ """Validate DataFrame is compatible with backtesting.py."""
265
+ assert list(df.columns) == ["Open", "High", "Low", "Close", "Volume"]
266
+ assert isinstance(df.index, pd.DatetimeIndex)
267
+ assert df.index.is_monotonic_increasing
268
+ assert not df.isnull().any().any()
269
+ assert (df["High"] >= df["Open"]).all()
270
+ assert (df["High"] >= df["Close"]).all()
271
+ assert (df["Low"] <= df["Open"]).all()
272
+ assert (df["Low"] <= df["Close"]).all()
273
+ return True
274
+ ```
275
+
276
+ ---
277
+
278
+ ## Error Handling
279
+
280
+ | Exception | When | Fix |
281
+ |-----------|------|-----|
282
+ | `ValueError` | Invalid threshold, bad dates | Check parameters |
283
+ | `RuntimeError` | Processing failure | Check data sorting |
284
+ | `ConnectionError` | ClickHouse unavailable | Check network/credentials |
285
+ | `FileNotFoundError` | Tick data not cached | Set `use_cache=False` |
286
+ | `AttributeError: RangeBarProcessor has no attribute X` | Rust binding outdated | Run `maturin develop` |
287
+ | `AssertionError: High < Low` | OHLC invariant violation | Check input data sorting |
288
+
289
+ ---
290
+
291
+ ## Development
292
+
293
+ ### Adding New Features
294
+
295
+ 1. Update `__init__.py` with new function
296
+ 2. Update `__init__.pyi` with type stub
297
+ 3. Add tests in `/tests/`
298
+ 4. Update `/docs/api.md`
299
+
300
+ ### Testing
301
+
302
+ ```bash
303
+ # Run Python tests
304
+ mise run test-py
305
+
306
+ # Test specific file
307
+ pytest tests/test_microstructure_features.py -v
308
+
309
+ # E2E tests
310
+ pytest tests/test_e2e_optimized.py -v
311
+ pytest tests/test_get_n_range_bars.py -v
312
+ ```
313
+
314
+ ### Portable Validation Scripts
315
+
316
+ For GPU workstations without full dev environment:
317
+ - `scripts/validate_n_range_bars.py` - Count-bounded API validation
318
+ - `scripts/validate_microstructure_features.py` - v7.0 feature validation
319
+
320
+ ---
321
+
322
+ ## Related
323
+
324
+ - [/CLAUDE.md](/CLAUDE.md) - Project hub
325
+ - [/docs/api.md](/docs/api.md) - Full API reference
326
+ - [/crates/CLAUDE.md](/crates/CLAUDE.md) - Rust crate details
327
+ - [/src/lib.rs](/src/lib.rs) - PyO3 bindings source
rangebar/__init__.py ADDED
@@ -0,0 +1,227 @@
1
+ # polars-exception: backtesting.py requires Pandas DataFrames for OHLCV data
2
+ """rangebar: Python bindings for range bar construction.
3
+
4
+ This package provides high-performance range bar construction for cryptocurrency
5
+ trading backtesting, with non-lookahead bias guarantees and temporal integrity.
6
+
7
+ Examples
8
+ --------
9
+ Basic usage:
10
+
11
+ >>> from rangebar import process_trades_to_dataframe
12
+ >>> import pandas as pd
13
+ >>>
14
+ >>> # Load Binance aggTrades data
15
+ >>> trades = pd.read_csv("BTCUSDT-aggTrades-2024-01.csv")
16
+ >>>
17
+ >>> # Convert to range bars (25 basis points = 0.25%)
18
+ >>> df = process_trades_to_dataframe(trades, threshold_decimal_bps=250)
19
+ >>>
20
+ >>> # Use with backtesting.py
21
+ >>> from backtesting import Backtest, Strategy
22
+ >>> bt = Backtest(df, MyStrategy, cash=10000, commission=0.0002)
23
+ >>> stats = bt.run()
24
+ """
25
+
26
+ from __future__ import annotations
27
+
28
+ from ._core import PositionVerification, __version__
29
+
30
+ __all__ = [
31
+ # Sorted alphabetically for RUF022 compliance
32
+ "ALL_OPTIONAL_COLUMNS",
33
+ "ASSET_CLASS_MULTIPLIERS",
34
+ "EXCHANGE_SESSION_COLUMNS",
35
+ "INTER_BAR_FEATURE_COLUMNS", # Issue #59
36
+ "MICROSTRUCTURE_COLUMNS",
37
+ "MIN_VERSION_FOR_MICROSTRUCTURE",
38
+ "MIN_VERSION_FOR_OUROBOROS",
39
+ "SCHEMA_VERSION_MICROSTRUCTURE",
40
+ "SCHEMA_VERSION_OHLCV_ONLY",
41
+ "SCHEMA_VERSION_OUROBOROS",
42
+ "THRESHOLD_DECIMAL_MAX",
43
+ "THRESHOLD_DECIMAL_MIN",
44
+ "THRESHOLD_PRESETS",
45
+ "TIER1_SYMBOLS",
46
+ "VALIDATION_PRESETS",
47
+ "AssetClass",
48
+ "AsyncStreamingProcessor",
49
+ "BinanceLiveStream",
50
+ "ContinuityError",
51
+ "ContinuityWarning",
52
+ "GapInfo",
53
+ "GapTier",
54
+ "OrphanedBarMetadata",
55
+ "OuroborosBoundary",
56
+ "OuroborosMode",
57
+ "PositionVerification",
58
+ "PrecomputeProgress",
59
+ "PrecomputeResult",
60
+ "RangeBarProcessor",
61
+ "ReconnectionConfig",
62
+ "StalenessResult",
63
+ "StreamingConfig",
64
+ "StreamingError",
65
+ "StreamingMetrics",
66
+ "StreamingRangeBarProcessor",
67
+ "TierSummary",
68
+ "TierThresholds",
69
+ "TieredValidationResult",
70
+ "ValidationPreset",
71
+ "__version__",
72
+ "auto_memory_guard",
73
+ "detect_asset_class",
74
+ "detect_staleness",
75
+ "ensure_memory_limit",
76
+ "get_n_range_bars",
77
+ "get_ouroboros_boundaries",
78
+ "get_range_bars",
79
+ "get_range_bars_pandas",
80
+ "normalize_arrow_dtypes",
81
+ "normalize_temporal_precision",
82
+ "populate_cache_resumable",
83
+ "precompute_range_bars",
84
+ "process_trades_polars",
85
+ "process_trades_to_dataframe",
86
+ "stream_binance_live",
87
+ "validate_continuity",
88
+ "validate_continuity_tiered",
89
+ ]
90
+
91
+ # Re-export checkpoint API per plan (#40)
92
+ from .checkpoint import populate_cache_resumable
93
+
94
+ # Import constants from centralized module (SSoT)
95
+ from .constants import (
96
+ ALL_OPTIONAL_COLUMNS,
97
+ EXCHANGE_SESSION_COLUMNS,
98
+ INTER_BAR_FEATURE_COLUMNS, # Issue #59
99
+ MICROSTRUCTURE_COLUMNS,
100
+ MIN_VERSION_FOR_MICROSTRUCTURE,
101
+ MIN_VERSION_FOR_OUROBOROS,
102
+ SCHEMA_VERSION_MICROSTRUCTURE,
103
+ SCHEMA_VERSION_OHLCV_ONLY,
104
+ SCHEMA_VERSION_OUROBOROS,
105
+ THRESHOLD_DECIMAL_MAX,
106
+ THRESHOLD_DECIMAL_MIN,
107
+ THRESHOLD_PRESETS,
108
+ TIER1_SYMBOLS,
109
+ )
110
+
111
+ # Import conversion utilities from centralized module (SSoT)
112
+ from .conversion import normalize_arrow_dtypes, normalize_temporal_precision
113
+
114
+ # Import orchestration functions from extracted module (M4 modularization)
115
+ from .orchestration.count_bounded import get_n_range_bars
116
+ from .orchestration.models import PrecomputeProgress, PrecomputeResult
117
+ from .orchestration.precompute import precompute_range_bars
118
+ from .orchestration.range_bars import get_range_bars, get_range_bars_pandas
119
+
120
+ # Re-export ouroboros API (cyclical reset boundaries for reproducibility)
121
+ from .ouroboros import (
122
+ OrphanedBarMetadata,
123
+ OuroborosBoundary,
124
+ OuroborosMode,
125
+ get_ouroboros_boundaries,
126
+ )
127
+
128
+ # Import RangeBarProcessor from extracted module (M2 modularization)
129
+ # Import process_trades_* functions from extracted module (M3 modularization)
130
+ from .processors.api import (
131
+ process_trades_chunked,
132
+ process_trades_polars,
133
+ process_trades_to_dataframe,
134
+ process_trades_to_dataframe_cached,
135
+ )
136
+ from .processors.core import RangeBarProcessor
137
+
138
+ # Memory safety guards (Issue #49, MEM-011)
139
+ # ensure_memory_limit() provides idempotent memory cap with env var support
140
+ # auto_memory_guard() is called at import to enable default 70% RAM limit
141
+ from .resource_guard import auto_memory_guard, ensure_memory_limit
142
+
143
+ # Enable memory guard by default on import (can be disabled with RANGEBAR_NO_MEMORY_GUARD=1)
144
+ auto_memory_guard()
145
+
146
+ # Streaming API (ADR: docs/adr/2026-01-31-realtime-streaming-api.md)
147
+ from .streaming import (
148
+ AsyncStreamingProcessor,
149
+ BinanceLiveStream,
150
+ ReconnectionConfig,
151
+ StreamingConfig,
152
+ StreamingError,
153
+ StreamingMetrics,
154
+ StreamingRangeBarProcessor,
155
+ stream_binance_live,
156
+ )
157
+
158
+ # Import staleness detection for cache validation (Issue #39: Schema Evolution)
159
+ from .validation.cache_staleness import StalenessResult, detect_staleness
160
+
161
+ # Import continuity validation from extracted module (M1 modularization)
162
+ from .validation.continuity import (
163
+ ASSET_CLASS_MULTIPLIERS,
164
+ VALIDATION_PRESETS,
165
+ AssetClass,
166
+ ContinuityError,
167
+ ContinuityWarning,
168
+ GapInfo,
169
+ GapTier,
170
+ TieredValidationResult,
171
+ TierSummary,
172
+ TierThresholds,
173
+ ValidationPreset,
174
+ detect_asset_class,
175
+ validate_continuity,
176
+ validate_continuity_tiered,
177
+ validate_junction_continuity,
178
+ )
179
+
180
+ # Re-export ClickHouse components for convenience
181
+ # NOTE: TIER1_SYMBOLS, THRESHOLD_PRESETS, THRESHOLD_DECIMAL_MIN/MAX
182
+ # are imported from constants.py (SSoT) at the top of this file
183
+
184
+
185
+ # =============================================================================
186
+ # Tiered Validation System (Issue #19 - v6.2.0+)
187
+ # =============================================================================
188
+ # MOVED to rangebar.validation.continuity (M1 modularization)
189
+ # All types, presets, and functions are imported at the top of this file.
190
+
191
+
192
+ # The following block was removed during M1 modularization.
193
+ # detect_asset_class, GapTier, AssetClass, TierThresholds, ValidationPreset,
194
+ # VALIDATION_PRESETS, ASSET_CLASS_MULTIPLIERS, GapInfo, TierSummary,
195
+ # TieredValidationResult, _resolve_validation, _classify_gap,
196
+ # validate_continuity_tiered are now imported from rangebar.validation.continuity.
197
+
198
+ # The following blocks were removed during M4 modularization:
199
+ # PrecomputeProgress, PrecomputeResult → rangebar.orchestration.models
200
+ # get_range_bars, get_range_bars_pandas → rangebar.orchestration.range_bars
201
+ # precompute_range_bars → rangebar.orchestration.precompute
202
+ # get_n_range_bars, _fill_gap_and_cache, _fetch_and_compute_bars → rangebar.orchestration.count_bounded
203
+ # _stream_range_bars_binance, _fetch_binance, _fetch_exness → rangebar.orchestration.helpers
204
+ # _process_binance_trades, _process_exness_ticks → rangebar.orchestration.helpers
205
+
206
+
207
+
208
+ def __getattr__(name: str) -> object:
209
+ """Lazy attribute access for ClickHouse and Exness components."""
210
+ if name in {
211
+ "RangeBarCache",
212
+ "CacheKey",
213
+ "get_available_clickhouse_host",
214
+ "detect_clickhouse_state",
215
+ "ClickHouseNotConfiguredError",
216
+ }:
217
+ from . import clickhouse
218
+
219
+ return getattr(clickhouse, name)
220
+
221
+ if name == "is_exness_available":
222
+ from .exness import is_exness_available
223
+
224
+ return is_exness_available
225
+
226
+ msg = f"module {__name__!r} has no attribute {name!r}"
227
+ raise AttributeError(msg)