signalflow-trading 0.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- signalflow/__init__.py +21 -0
- signalflow/analytics/__init__.py +0 -0
- signalflow/core/__init__.py +46 -0
- signalflow/core/base_mixin.py +232 -0
- signalflow/core/containers/__init__.py +21 -0
- signalflow/core/containers/order.py +216 -0
- signalflow/core/containers/portfolio.py +211 -0
- signalflow/core/containers/position.py +296 -0
- signalflow/core/containers/raw_data.py +167 -0
- signalflow/core/containers/raw_data_view.py +169 -0
- signalflow/core/containers/signals.py +198 -0
- signalflow/core/containers/strategy_state.py +147 -0
- signalflow/core/containers/trade.py +112 -0
- signalflow/core/decorators.py +103 -0
- signalflow/core/enums.py +270 -0
- signalflow/core/registry.py +322 -0
- signalflow/core/rolling_aggregator.py +362 -0
- signalflow/core/signal_transforms/__init__.py +5 -0
- signalflow/core/signal_transforms/base_signal_transform.py +186 -0
- signalflow/data/__init__.py +11 -0
- signalflow/data/raw_data_factory.py +225 -0
- signalflow/data/raw_store/__init__.py +7 -0
- signalflow/data/raw_store/base.py +271 -0
- signalflow/data/raw_store/duckdb_stores.py +696 -0
- signalflow/data/source/__init__.py +10 -0
- signalflow/data/source/base.py +300 -0
- signalflow/data/source/binance.py +442 -0
- signalflow/data/strategy_store/__init__.py +8 -0
- signalflow/data/strategy_store/base.py +278 -0
- signalflow/data/strategy_store/duckdb.py +409 -0
- signalflow/data/strategy_store/schema.py +36 -0
- signalflow/detector/__init__.py +7 -0
- signalflow/detector/adapter/__init__.py +5 -0
- signalflow/detector/adapter/pandas_detector.py +46 -0
- signalflow/detector/base.py +390 -0
- signalflow/detector/sma_cross.py +105 -0
- signalflow/feature/__init__.py +16 -0
- signalflow/feature/adapter/__init__.py +5 -0
- signalflow/feature/adapter/pandas_feature_extractor.py +54 -0
- signalflow/feature/base.py +330 -0
- signalflow/feature/feature_set.py +286 -0
- signalflow/feature/oscillator/__init__.py +5 -0
- signalflow/feature/oscillator/rsi_extractor.py +42 -0
- signalflow/feature/pandasta/__init__.py +10 -0
- signalflow/feature/pandasta/pandas_ta_extractor.py +141 -0
- signalflow/feature/pandasta/top_pandasta_extractors.py +64 -0
- signalflow/feature/smoother/__init__.py +5 -0
- signalflow/feature/smoother/sma_extractor.py +46 -0
- signalflow/strategy/__init__.py +9 -0
- signalflow/strategy/broker/__init__.py +15 -0
- signalflow/strategy/broker/backtest.py +172 -0
- signalflow/strategy/broker/base.py +186 -0
- signalflow/strategy/broker/executor/__init__.py +9 -0
- signalflow/strategy/broker/executor/base.py +35 -0
- signalflow/strategy/broker/executor/binance_spot.py +12 -0
- signalflow/strategy/broker/executor/virtual_spot.py +81 -0
- signalflow/strategy/broker/realtime_spot.py +12 -0
- signalflow/strategy/component/__init__.py +9 -0
- signalflow/strategy/component/base.py +65 -0
- signalflow/strategy/component/entry/__init__.py +7 -0
- signalflow/strategy/component/entry/fixed_size.py +57 -0
- signalflow/strategy/component/entry/signal.py +127 -0
- signalflow/strategy/component/exit/__init__.py +5 -0
- signalflow/strategy/component/exit/time_based.py +47 -0
- signalflow/strategy/component/exit/tp_sl.py +80 -0
- signalflow/strategy/component/metric/__init__.py +8 -0
- signalflow/strategy/component/metric/main_metrics.py +181 -0
- signalflow/strategy/runner/__init__.py +8 -0
- signalflow/strategy/runner/backtest_runner.py +208 -0
- signalflow/strategy/runner/base.py +19 -0
- signalflow/strategy/runner/optimized_backtest_runner.py +178 -0
- signalflow/strategy/runner/realtime_runner.py +0 -0
- signalflow/target/__init__.py +14 -0
- signalflow/target/adapter/__init__.py +5 -0
- signalflow/target/adapter/pandas_labeler.py +45 -0
- signalflow/target/base.py +409 -0
- signalflow/target/fixed_horizon_labeler.py +93 -0
- signalflow/target/static_triple_barrier.py +162 -0
- signalflow/target/triple_barrier.py +188 -0
- signalflow/utils/__init__.py +7 -0
- signalflow/utils/import_utils.py +11 -0
- signalflow/utils/tune_utils.py +19 -0
- signalflow/validator/__init__.py +6 -0
- signalflow/validator/base.py +139 -0
- signalflow/validator/sklearn_validator.py +527 -0
- signalflow_trading-0.2.1.dist-info/METADATA +149 -0
- signalflow_trading-0.2.1.dist-info/RECORD +90 -0
- signalflow_trading-0.2.1.dist-info/WHEEL +5 -0
- signalflow_trading-0.2.1.dist-info/licenses/LICENSE +21 -0
- signalflow_trading-0.2.1.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,442 @@
|
|
|
1
|
+
# IMPORTANT
|
|
2
|
+
import asyncio
|
|
3
|
+
from dataclasses import dataclass, field
|
|
4
|
+
from datetime import datetime, timedelta, timezone
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import Optional
|
|
7
|
+
|
|
8
|
+
import aiohttp
|
|
9
|
+
from loguru import logger
|
|
10
|
+
|
|
11
|
+
from signalflow.data.raw_store import DuckDbSpotStore
|
|
12
|
+
from signalflow.core import sf_component
|
|
13
|
+
from signalflow.data.source.base import RawDataSource, RawDataLoader
|
|
14
|
+
|
|
15
|
+
_TIMEFRAME_MS: dict[str, int] = {
|
|
16
|
+
"1m": 60_000,
|
|
17
|
+
"3m": 3 * 60_000,
|
|
18
|
+
"5m": 5 * 60_000,
|
|
19
|
+
"15m": 15 * 60_000,
|
|
20
|
+
"30m": 30 * 60_000,
|
|
21
|
+
"1h": 60 * 60_000,
|
|
22
|
+
"2h": 2 * 60 * 60_000,
|
|
23
|
+
"4h": 4 * 60 * 60_000,
|
|
24
|
+
"6h": 6 * 60 * 60_000,
|
|
25
|
+
"8h": 8 * 60 * 60_000,
|
|
26
|
+
"12h": 12 * 60 * 60_000,
|
|
27
|
+
"1d": 24 * 60 * 60_000,
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
def _dt_to_ms_utc(dt: datetime) -> int:
|
|
32
|
+
"""Convert datetime to UNIX milliseconds in UTC.
|
|
33
|
+
|
|
34
|
+
Accepts naive (assumed UTC) or aware (converted to UTC) datetimes.
|
|
35
|
+
|
|
36
|
+
Args:
|
|
37
|
+
dt (datetime): Input datetime.
|
|
38
|
+
|
|
39
|
+
Returns:
|
|
40
|
+
int: UNIX timestamp in milliseconds (UTC).
|
|
41
|
+
"""
|
|
42
|
+
if dt.tzinfo is None:
|
|
43
|
+
dt = dt.replace(tzinfo=timezone.utc)
|
|
44
|
+
else:
|
|
45
|
+
dt = dt.astimezone(timezone.utc)
|
|
46
|
+
return int(dt.timestamp() * 1000)
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
def _ms_to_dt_utc_naive(ms: int) -> datetime:
|
|
50
|
+
"""Convert UNIX milliseconds to UTC-naive datetime.
|
|
51
|
+
|
|
52
|
+
Args:
|
|
53
|
+
ms (int): UNIX timestamp in milliseconds.
|
|
54
|
+
|
|
55
|
+
Returns:
|
|
56
|
+
datetime: UTC datetime without timezone info.
|
|
57
|
+
"""
|
|
58
|
+
return datetime.fromtimestamp(ms / 1000, tz=timezone.utc).replace(tzinfo=None)
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
def _ensure_utc_naive(dt: datetime) -> datetime:
|
|
62
|
+
"""Normalize to UTC-naive datetime.
|
|
63
|
+
|
|
64
|
+
Args:
|
|
65
|
+
dt (datetime): Input datetime (naive or aware).
|
|
66
|
+
|
|
67
|
+
Returns:
|
|
68
|
+
datetime: UTC-naive datetime.
|
|
69
|
+
"""
|
|
70
|
+
if dt.tzinfo is None:
|
|
71
|
+
return dt
|
|
72
|
+
return dt.astimezone(timezone.utc).replace(tzinfo=None)
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
@dataclass
@sf_component(name="binance")
class BinanceClient(RawDataSource):
    """Async client for Binance REST API.

    Provides async methods for fetching OHLCV candlestick data with automatic
    retries, rate limit handling, and pagination.

    IMPORTANT: Returned timestamps are candle CLOSE times (Binance k[6]), UTC-naive.

    Attributes:
        base_url (str): Binance API base URL. Default: "https://api.binance.com".
        max_retries (int): Maximum retry attempts. Default: 3.
        timeout_sec (int): Request timeout in seconds. Default: 30.
        min_delay_sec (float): Minimum delay between requests. Default: 0.05.
    """

    base_url: str = "https://api.binance.com"
    max_retries: int = 3
    timeout_sec: int = 30
    min_delay_sec: float = 0.05

    # Created lazily in __aenter__; excluded from the dataclass __init__ so the
    # client can only make requests inside an `async with` block.
    _session: Optional[aiohttp.ClientSession] = field(default=None, init=False)

    async def __aenter__(self) -> "BinanceClient":
        """Enter async context - creates session."""
        timeout = aiohttp.ClientTimeout(total=self.timeout_sec)
        self._session = aiohttp.ClientSession(timeout=timeout)
        return self

    async def __aexit__(self, *args) -> None:
        """Exit async context - closes session."""
        if self._session:
            await self._session.close()
        self._session = None

    async def get_klines(
        self,
        pair: str,
        timeframe: str = "1m",
        *,
        start_time: Optional[datetime] = None,
        end_time: Optional[datetime] = None,
        limit: int = 1000,
    ) -> list[dict]:
        """Fetch OHLCV klines from Binance.

        IMPORTANT: Returned "timestamp" is CANDLE CLOSE TIME (UTC-naive).

        Args:
            pair (str): Trading pair (e.g., "BTCUSDT").
            timeframe (str): Interval (1m, 5m, 1h, 1d, etc.). Default: "1m".
            start_time (datetime | None): Range start (naive=UTC or aware).
            end_time (datetime | None): Range end (naive=UTC or aware).
            limit (int): Max candles (max 1000). Default: 1000.

        Returns:
            list[dict]: OHLCV dicts with keys: timestamp, open, high, low,
                close, volume, trades.

        Raises:
            RuntimeError: If not in async context or API error.
        """
        if self._session is None:
            raise RuntimeError("BinanceClient must be used as an async context manager.")

        params: dict[str, object] = {"symbol": pair, "interval": timeframe, "limit": int(limit)}
        if start_time is not None:
            params["startTime"] = _dt_to_ms_utc(start_time)
        if end_time is not None:
            params["endTime"] = _dt_to_ms_utc(end_time)

        url = f"{self.base_url}/api/v3/klines"
        last_err: Optional[Exception] = None

        for attempt in range(self.max_retries):
            try:
                async with self._session.get(url, params=params) as resp:
                    # A 429 consumes one of max_retries attempts: we honor the
                    # server's Retry-After (default 60s) and loop again.
                    if resp.status == 429:
                        retry_after = int(resp.headers.get("Retry-After", 60))
                        logger.warning(f"Rate limited, waiting {retry_after}s (pair={pair}, tf={timeframe})")
                        await asyncio.sleep(retry_after)
                        continue

                    if resp.status != 200:
                        text = await resp.text()
                        raise RuntimeError(f"Binance API error {resp.status}: {text}")

                    data = await resp.json()

                    out: list[dict] = []
                    for k in data:
                        # k[6] is the candle close time in milliseconds.
                        close_ms = int(k[6])
                        out.append(
                            {
                                "timestamp": _ms_to_dt_utc_naive(close_ms),
                                "open": float(k[1]),
                                "high": float(k[2]),
                                "low": float(k[3]),
                                "close": float(k[4]),
                                # NOTE(review): in Binance kline payloads k[5] is
                                # base-asset volume and k[7] is quote-asset volume;
                                # this stores k[7] — confirm quote volume is intended.
                                "volume": float(k[7]),
                                "trades": int(k[8]),
                            }
                        )

                    return out

            # RuntimeError is included so non-200 API responses raised above
            # are also retried with exponential backoff (1s, 2s, 4s, ...).
            except (aiohttp.ClientError, asyncio.TimeoutError, RuntimeError) as e:
                last_err = e
                if attempt < self.max_retries - 1:
                    wait = 2**attempt
                    logger.warning(f"Request failed, retrying in {wait}s (pair={pair}, tf={timeframe}): {e}")
                    await asyncio.sleep(wait)
                else:
                    break

        # Reached only when every attempt failed (or all were 429s).
        raise last_err or RuntimeError("Unknown error while fetching klines.")

    async def get_klines_range(
        self,
        pair: str,
        timeframe: str,
        start_time: datetime,
        end_time: datetime,
        *,
        limit: int = 1000,
    ) -> list[dict]:
        """Download all klines for period with automatic pagination.

        Semantics:
            - Range by CANDLE CLOSE TIME: [start_time, end_time] inclusive
            - Returns UTC-naive timestamps
            - Automatic deduplication

        Pagination strategy:
            - Request windows of size limit * timeframe
            - Advance based on last returned close time + 1ms
            - Additional dedup at end for safety

        Args:
            pair (str): Trading pair.
            timeframe (str): Interval (must be in _TIMEFRAME_MS).
            start_time (datetime): Range start (inclusive).
            end_time (datetime): Range end (inclusive).
            limit (int): Candles per request. Default: 1000.

        Returns:
            list[dict]: Deduplicated, sorted OHLCV dicts.

        Raises:
            ValueError: If timeframe unsupported.
            RuntimeError: If pagination exceeds safety limit (2M loops).
        """
        if timeframe not in _TIMEFRAME_MS:
            raise ValueError(f"Unsupported timeframe: {timeframe}")

        start_time = _ensure_utc_naive(start_time)
        end_time = _ensure_utc_naive(end_time)

        if start_time >= end_time:
            return []

        tf_ms = _TIMEFRAME_MS[timeframe]
        # Each request covers at most `limit` candles' worth of time.
        window = timedelta(milliseconds=tf_ms * limit)

        all_klines: list[dict] = []
        current_start = start_time

        # Hard cap on iterations so a server that keeps returning stale data
        # cannot spin this loop forever.
        max_loops = 2_000_000
        loops = 0

        while current_start < end_time:
            loops += 1
            if loops > max_loops:
                raise RuntimeError("Pagination guard triggered (too many loops).")

            req_end = min(current_start + window, end_time)

            klines = await self.get_klines(
                pair=pair,
                timeframe=timeframe,
                start_time=current_start,
                end_time=req_end,
            limit=limit,
            )

            # Empty window (e.g. before listing date): skip past it entirely.
            if not klines:
                current_start = req_end + timedelta(milliseconds=1)
                await asyncio.sleep(self.min_delay_sec)
                continue

            klines.sort(key=lambda x: x["timestamp"])

            # Keep only candles whose close time falls inside the requested
            # inclusive range.
            for k in klines:
                ts = k["timestamp"]
                if start_time <= ts <= end_time:
                    all_klines.append(k)

            # Advance past the last close time; the +1ms prevents refetching
            # the boundary candle.
            last_close = klines[-1]["timestamp"]
            next_start = last_close + timedelta(milliseconds=1)

            # Guard against non-advancing responses (would otherwise loop on
            # the same window until max_loops trips).
            if next_start <= current_start:
                current_start = current_start + timedelta(milliseconds=1)
            else:
                current_start = next_start

            if len(all_klines) and len(all_klines) % 10000 == 0:
                logger.info(f"{pair}: loaded {len(all_klines):,} candles...")

            # Courtesy delay between paginated requests.
            await asyncio.sleep(self.min_delay_sec)

        # Final dedup keyed on close time: later pages win, then re-sort.
        uniq: dict[datetime, dict] = {}
        for k in all_klines:
            uniq[k["timestamp"]] = k

        out = list(uniq.values())
        out.sort(key=lambda x: x["timestamp"])
        return out
|
|
293
|
+
|
|
294
|
+
|
|
295
|
+
@dataclass
@sf_component(name="binance/spot")
class BinanceSpotLoader(RawDataLoader):
    """Downloads and stores Binance spot OHLCV data for fixed timeframe.

    Combines BinanceClient (source) and DuckDbSpotStore (storage) to provide
    complete data pipeline with gap filling and incremental updates.

    Attributes:
        store (DuckDbSpotStore): Storage backend. Default: raw_data.duckdb.
        timeframe (str): Fixed timeframe for all data. Default: "1m".
    """

    store: DuckDbSpotStore = field(default_factory=lambda: DuckDbSpotStore(db_path=Path("raw_data.duckdb")))
    timeframe: str = "1m"

    async def download(
        self,
        pairs: list[str],
        days: Optional[int] = None,
        start: Optional[datetime] = None,
        end: Optional[datetime] = None,
        fill_gaps: bool = True,
    ) -> None:
        """Download historical data with intelligent range detection.

        Automatically determines what to download:
        - If no existing data: download full range
        - If data exists: download before/after existing range
        - If fill_gaps=True: detect and fill gaps in existing range

        Args:
            pairs (list[str]): Trading pairs to download.
            days (int | None): Number of days back from end. Default: 7 when
                omitted; an explicit 0 is honored (empty range).
            start (datetime | None): Range start (overrides days).
            end (datetime | None): Range end. Default: now.
            fill_gaps (bool): Detect and fill gaps. Default: True.

        Note:
            Runs async download for all pairs concurrently.
            Logs progress for large downloads.
            Errors logged but don't stop other pairs.
            The store is closed when the download finishes (or is cancelled).
        """
        # Everything downstream works in UTC-naive datetimes.
        now = datetime.now(timezone.utc).replace(tzinfo=None)
        end = now if end is None else _ensure_utc_naive(end)

        if start is None:
            # `is not None` (not truthiness) so an explicit days=0 yields an
            # empty range instead of silently falling back to 7 days.
            start = end - timedelta(days=days if days is not None else 7)
        else:
            start = _ensure_utc_naive(start)

        # Derive candle spacing from the module-level timeframe table instead
        # of a duplicated hard-coded minutes dict (single source of truth).
        # Unknown timeframes fall back to 1 minute, as before.
        tf_minutes = _TIMEFRAME_MS.get(self.timeframe, 60_000) // 60_000

        async def download_pair(client: BinanceClient, pair: str) -> None:
            # Determine the missing ranges for one pair and download them.
            logger.info(f"Processing {pair} from {start} to {end}")

            db_min, db_max = self.store.get_time_bounds(pair)
            ranges_to_download: list[tuple[datetime, datetime]] = []

            if db_min is None:
                # No stored data at all: fetch the whole requested range.
                ranges_to_download.append((start, end))
            else:
                # Extend before/after the stored range, stepping one candle
                # off the stored bounds to avoid refetching edge candles.
                if start < db_min:
                    ranges_to_download.append((start, db_min - timedelta(minutes=tf_minutes)))
                if end > db_max:
                    ranges_to_download.append((db_max + timedelta(minutes=tf_minutes), end))

                if fill_gaps:
                    # Only probe for gaps inside the stored/requested overlap.
                    overlap_start = max(start, db_min)
                    overlap_end = min(end, db_max)
                    if overlap_start < overlap_end:
                        gaps = self.store.find_gaps(pair, overlap_start, overlap_end, tf_minutes)
                        ranges_to_download.extend(gaps)

            for range_start, range_end in ranges_to_download:
                if range_start >= range_end:
                    continue

                logger.info(f"{pair}: downloading {range_start} -> {range_end}")

                try:
                    klines = await client.get_klines_range(
                        pair=pair,
                        timeframe=self.timeframe,
                        start_time=range_start,
                        end_time=range_end,
                    )
                    self.store.insert_klines(pair, klines)
                except Exception as e:
                    # Best-effort per range: log and keep going so one bad
                    # range/pair doesn't abort the rest.
                    logger.error(f"Error downloading {pair}: {e}")

        try:
            async with BinanceClient() as client:
                await asyncio.gather(*[download_pair(client, pair) for pair in pairs])
        finally:
            # Always release the store handle, even if the gather is cancelled.
            self.store.close()

    async def sync(
        self,
        pairs: list[str],
        update_interval_sec: int = 60,
    ) -> None:
        """Real-time sync - continuously update with latest data.

        Runs indefinitely, fetching latest candles at specified interval.
        Useful for live trading or monitoring.

        Args:
            pairs (list[str]): Trading pairs to sync.
            update_interval_sec (int): Update interval in seconds. Default: 60.

        Note:
            Runs forever - use Ctrl+C to stop or run in background task.
            Fetches last 5 candles per update (ensures no gaps).
            Errors logged but sync continues.
        """

        logger.info(f"Starting real-time sync for {pairs}")
        logger.info(f"Update interval: {update_interval_sec}s (timeframe={self.timeframe})")

        async def fetch_and_store(client: BinanceClient, pair: str) -> None:
            # Best-effort single-pair refresh; errors must not kill the loop.
            try:
                # limit=5 re-fetches a few recent candles so brief outages
                # shorter than 5 intervals leave no gaps.
                klines = await client.get_klines(pair=pair, timeframe=self.timeframe, limit=5)
                self.store.insert_klines(pair, klines)
            except Exception as e:
                logger.error(f"Error syncing {pair}: {e}")

        async with BinanceClient() as client:
            while True:
                await asyncio.gather(*[fetch_and_store(client, pair) for pair in pairs])
                logger.debug(f"Synced {len(pairs)} pairs")
                await asyncio.sleep(update_interval_sec)
|