oxarchive 0.4.4__tar.gz → 0.5.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {oxarchive-0.4.4 → oxarchive-0.5.2}/PKG-INFO +110 -7
- {oxarchive-0.4.4 → oxarchive-0.5.2}/README.md +109 -6
- {oxarchive-0.4.4 → oxarchive-0.5.2}/oxarchive/__init__.py +5 -1
- {oxarchive-0.4.4 → oxarchive-0.5.2}/oxarchive/exchanges.py +11 -0
- {oxarchive-0.4.4 → oxarchive-0.5.2}/oxarchive/resources/__init__.py +4 -0
- oxarchive-0.5.2/oxarchive/resources/candles.py +121 -0
- oxarchive-0.5.2/oxarchive/resources/liquidations.py +198 -0
- {oxarchive-0.4.4 → oxarchive-0.5.2}/oxarchive/types.py +119 -2
- {oxarchive-0.4.4 → oxarchive-0.5.2}/oxarchive/websocket.py +42 -3
- {oxarchive-0.4.4 → oxarchive-0.5.2}/pyproject.toml +1 -1
- {oxarchive-0.4.4 → oxarchive-0.5.2}/.gitignore +0 -0
- {oxarchive-0.4.4 → oxarchive-0.5.2}/oxarchive/client.py +0 -0
- {oxarchive-0.4.4 → oxarchive-0.5.2}/oxarchive/http.py +0 -0
- {oxarchive-0.4.4 → oxarchive-0.5.2}/oxarchive/resources/funding.py +0 -0
- {oxarchive-0.4.4 → oxarchive-0.5.2}/oxarchive/resources/instruments.py +0 -0
- {oxarchive-0.4.4 → oxarchive-0.5.2}/oxarchive/resources/openinterest.py +0 -0
- {oxarchive-0.4.4 → oxarchive-0.5.2}/oxarchive/resources/orderbook.py +0 -0
- {oxarchive-0.4.4 → oxarchive-0.5.2}/oxarchive/resources/trades.py +0 -0
{oxarchive-0.4.4 → oxarchive-0.5.2}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: oxarchive
-Version: 0.4.4
+Version: 0.5.2
 Summary: Official Python SDK for 0xarchive - Hyperliquid Historical Data API
 Project-URL: Homepage, https://0xarchive.io
 Project-URL: Documentation, https://0xarchive.io/docs/sdks
@@ -317,6 +317,57 @@ current = await client.hyperliquid.open_interest.acurrent("BTC")
 history = await client.hyperliquid.open_interest.ahistory("ETH", start=..., end=...)
 ```

+### Candles (OHLCV)
+
+Get historical OHLCV candle data aggregated from trades.
+
+```python
+# Get candle history (start is required)
+candles = client.hyperliquid.candles.history(
+    "BTC",
+    start="2024-01-01",
+    end="2024-01-02",
+    interval="1h",  # 1m, 5m, 15m, 30m, 1h, 4h, 1d, 1w
+    limit=100
+)
+
+# Iterate through candles
+for candle in candles.data:
+    print(f"{candle.timestamp}: O={candle.open} H={candle.high} L={candle.low} C={candle.close} V={candle.volume}")
+
+# Cursor-based pagination for large datasets
+result = client.hyperliquid.candles.history("BTC", start=..., end=..., interval="1m", limit=1000)
+while result.next_cursor:
+    result = client.hyperliquid.candles.history(
+        "BTC", start=..., end=..., interval="1m",
+        cursor=result.next_cursor, limit=1000
+    )
+
+# Lighter.xyz candles
+lighter_candles = client.lighter.candles.history(
+    "BTC",
+    start="2024-01-01",
+    end="2024-01-02",
+    interval="15m"
+)
+
+# Async versions
+candles = await client.hyperliquid.candles.ahistory("BTC", start=..., end=..., interval="1h")
+```
+
+#### Available Intervals
+
+| Interval | Description |
+|----------|-------------|
+| `1m` | 1 minute |
+| `5m` | 5 minutes |
+| `15m` | 15 minutes |
+| `30m` | 30 minutes |
+| `1h` | 1 hour (default) |
+| `4h` | 4 hours |
+| `1d` | 1 day |
+| `1w` | 1 week |
+
 ### Legacy API (Deprecated)

 The following legacy methods are deprecated and will be removed in v2.0. They default to Hyperliquid data:

@@ -418,6 +469,19 @@ async def main():
         speed=10  # Optional, defaults to 1x
     )

+    # Lighter.xyz replay with granularity (tier restrictions apply)
+    await ws.replay(
+        "orderbook", "BTC",
+        start=int(time.time() * 1000) - 86400000,
+        speed=10,
+        granularity="10s"  # Options: 'checkpoint', '30s', '10s', '1s', 'tick'
+    )
+
+    # Handle tick-level data (granularity='tick', Enterprise tier)
+    ws.on_historical_tick_data(lambda coin, checkpoint, deltas:
+        print(f"Checkpoint: {len(checkpoint['bids'])} bids, Deltas: {len(deltas)}")
+    )
+
     # Control playback
     await ws.replay_pause()
     await ws.replay_resume()

@@ -463,6 +527,14 @@ async def main():
         batch_size=1000  # Optional, defaults to 1000
     )

+    # Lighter.xyz stream with granularity (tier restrictions apply)
+    await ws.stream(
+        "orderbook", "BTC",
+        start=int(time.time() * 1000) - 3600000,
+        end=int(time.time() * 1000),
+        granularity="10s"  # Options: 'checkpoint', '30s', '10s', '1s', 'tick'
+    )
+
     # Stop if needed
     await ws.stream_stop()

@@ -484,12 +556,43 @@ ws = OxArchiveWs(WsOptions(

 ### Available Channels

-| Channel | Description | Requires Coin |
-|---------|-------------|---------------|
-| `orderbook` | L2 order book updates | Yes |
-| `trades` | Trade/fill updates | Yes |
-| `ticker` | Price and 24h volume | Yes |
-| `all_tickers` | All market tickers | No |
+| Channel | Description | Requires Coin | Historical Support |
+|---------|-------------|---------------|-------------------|
+| `orderbook` | L2 order book updates | Yes | Yes |
+| `trades` | Trade/fill updates | Yes | Yes |
+| `candles` | OHLCV candle data | Yes | Yes (replay/stream only) |
+| `ticker` | Price and 24h volume | Yes | Real-time only |
+| `all_tickers` | All market tickers | No | Real-time only |
+
+#### Candle Replay/Stream
+
+```python
+# Replay candles at 10x speed
+await ws.replay(
+    "candles", "BTC",
+    start=int(time.time() * 1000) - 86400000,
+    end=int(time.time() * 1000),
+    speed=10,
+    interval="15m"  # 1m, 5m, 15m, 30m, 1h, 4h, 1d, 1w
+)
+
+# Bulk stream candles
+await ws.stream(
+    "candles", "ETH",
+    start=int(time.time() * 1000) - 3600000,
+    end=int(time.time() * 1000),
+    batch_size=1000,
+    interval="1h"
+)
+
+# Lighter.xyz candles
+await ws.replay(
+    "lighter_candles", "BTC",
+    start=...,
+    speed=10,
+    interval="5m"
+)
+```

 ## Timestamp Formats

{oxarchive-0.4.4 → oxarchive-0.5.2}/README.md

@@ -280,6 +280,57 @@ current = await client.hyperliquid.open_interest.acurrent("BTC")
 history = await client.hyperliquid.open_interest.ahistory("ETH", start=..., end=...)
 ```

+### Candles (OHLCV)
+
+Get historical OHLCV candle data aggregated from trades.
+
+```python
+# Get candle history (start is required)
+candles = client.hyperliquid.candles.history(
+    "BTC",
+    start="2024-01-01",
+    end="2024-01-02",
+    interval="1h",  # 1m, 5m, 15m, 30m, 1h, 4h, 1d, 1w
+    limit=100
+)
+
+# Iterate through candles
+for candle in candles.data:
+    print(f"{candle.timestamp}: O={candle.open} H={candle.high} L={candle.low} C={candle.close} V={candle.volume}")
+
+# Cursor-based pagination for large datasets
+result = client.hyperliquid.candles.history("BTC", start=..., end=..., interval="1m", limit=1000)
+while result.next_cursor:
+    result = client.hyperliquid.candles.history(
+        "BTC", start=..., end=..., interval="1m",
+        cursor=result.next_cursor, limit=1000
+    )
+
+# Lighter.xyz candles
+lighter_candles = client.lighter.candles.history(
+    "BTC",
+    start="2024-01-01",
+    end="2024-01-02",
+    interval="15m"
+)
+
+# Async versions
+candles = await client.hyperliquid.candles.ahistory("BTC", start=..., end=..., interval="1h")
+```
+
+#### Available Intervals
+
+| Interval | Description |
+|----------|-------------|
+| `1m` | 1 minute |
+| `5m` | 5 minutes |
+| `15m` | 15 minutes |
+| `30m` | 30 minutes |
+| `1h` | 1 hour (default) |
+| `4h` | 4 hours |
+| `1d` | 1 day |
+| `1w` | 1 week |
+
 ### Legacy API (Deprecated)

 The following legacy methods are deprecated and will be removed in v2.0. They default to Hyperliquid data:

@@ -381,6 +432,19 @@ async def main():
         speed=10  # Optional, defaults to 1x
     )

+    # Lighter.xyz replay with granularity (tier restrictions apply)
+    await ws.replay(
+        "orderbook", "BTC",
+        start=int(time.time() * 1000) - 86400000,
+        speed=10,
+        granularity="10s"  # Options: 'checkpoint', '30s', '10s', '1s', 'tick'
+    )
+
+    # Handle tick-level data (granularity='tick', Enterprise tier)
+    ws.on_historical_tick_data(lambda coin, checkpoint, deltas:
+        print(f"Checkpoint: {len(checkpoint['bids'])} bids, Deltas: {len(deltas)}")
+    )
+
     # Control playback
     await ws.replay_pause()
     await ws.replay_resume()

@@ -426,6 +490,14 @@ async def main():
         batch_size=1000  # Optional, defaults to 1000
     )

+    # Lighter.xyz stream with granularity (tier restrictions apply)
+    await ws.stream(
+        "orderbook", "BTC",
+        start=int(time.time() * 1000) - 3600000,
+        end=int(time.time() * 1000),
+        granularity="10s"  # Options: 'checkpoint', '30s', '10s', '1s', 'tick'
+    )
+
     # Stop if needed
     await ws.stream_stop()

@@ -447,12 +519,43 @@ ws = OxArchiveWs(WsOptions(

 ### Available Channels

-| Channel | Description | Requires Coin |
-|---------|-------------|---------------|
-| `orderbook` | L2 order book updates | Yes |
-| `trades` | Trade/fill updates | Yes |
-| `ticker` | Price and 24h volume | Yes |
-| `all_tickers` | All market tickers | No |
+| Channel | Description | Requires Coin | Historical Support |
+|---------|-------------|---------------|-------------------|
+| `orderbook` | L2 order book updates | Yes | Yes |
+| `trades` | Trade/fill updates | Yes | Yes |
+| `candles` | OHLCV candle data | Yes | Yes (replay/stream only) |
+| `ticker` | Price and 24h volume | Yes | Real-time only |
+| `all_tickers` | All market tickers | No | Real-time only |
+
+#### Candle Replay/Stream
+
+```python
+# Replay candles at 10x speed
+await ws.replay(
+    "candles", "BTC",
+    start=int(time.time() * 1000) - 86400000,
+    end=int(time.time() * 1000),
+    speed=10,
+    interval="15m"  # 1m, 5m, 15m, 30m, 1h, 4h, 1d, 1w
+)
+
+# Bulk stream candles
+await ws.stream(
+    "candles", "ETH",
+    start=int(time.time() * 1000) - 3600000,
+    end=int(time.time() * 1000),
+    batch_size=1000,
+    interval="1h"
+)
+
+# Lighter.xyz candles
+await ws.replay(
+    "lighter_candles", "BTC",
+    start=...,
+    speed=10,
+    interval="5m"
+)
+```

 ## Timestamp Formats

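The cursor-based pagination documented above can be folded into a small helper. This is a sketch rather than SDK code: `fetch_all_candles` is a hypothetical name, and it relies only on the `history()` signature and the `data`/`next_cursor` fields shown in the README changes, with `client` assumed to be a configured SDK client as created earlier in the README.

```python
# Hypothetical convenience wrapper around the documented cursor pagination.
def fetch_all_candles(client, coin, start, end, interval="1h", limit=1000):
    """Collect every candle page between start and end into one list."""
    result = client.hyperliquid.candles.history(
        coin, start=start, end=end, interval=interval, limit=limit
    )
    candles = list(result.data)
    while result.next_cursor:
        result = client.hyperliquid.candles.history(
            coin, start=start, end=end, interval=interval,
            cursor=result.next_cursor, limit=limit,
        )
        candles.extend(result.data)
    return candles
```

The same loop shape works for `client.lighter.candles` and, per the new resources below, for `liquidations.history`, since each returns a `CursorResponse`.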
{oxarchive-0.4.4 → oxarchive-0.5.2}/oxarchive/__init__.py

@@ -31,6 +31,8 @@ from .types import (
     LighterInstrument,
     FundingRate,
     OpenInterest,
+    Candle,
+    CandleInterval,
     OxArchiveError,
     # WebSocket types
     WsChannel,

@@ -65,7 +67,7 @@ except ImportError:
     OxArchiveWs = None  # type: ignore
     WsOptions = None  # type: ignore

-__version__ = "0.4.4"
+__version__ = "0.5.2"

 __all__ = [
     # Client

@@ -84,6 +86,8 @@ __all__ = [
     "LighterGranularity",
     "FundingRate",
     "OpenInterest",
+    "Candle",
+    "CandleInterval",
     "OxArchiveError",
     # WebSocket Types
     "WsChannel",
{oxarchive-0.4.4 → oxarchive-0.5.2}/oxarchive/exchanges.py

@@ -10,6 +10,8 @@ from .resources import (
     LighterInstrumentsResource,
     FundingResource,
     OpenInterestResource,
+    CandlesResource,
+    LiquidationsResource,
 )


@@ -44,6 +46,12 @@ class HyperliquidClient:
         self.open_interest = OpenInterestResource(http, base_path)
         """Open interest"""

+
+        self.candles = CandlesResource(http, base_path)
+        """OHLCV candle data"""
+
+        self.liquidations = LiquidationsResource(http, base_path)
+        """Liquidation events (May 2025+)"""

 class LighterClient:
     """

@@ -77,3 +85,6 @@ class LighterClient:

         self.open_interest = OpenInterestResource(http, base_path)
         """Open interest"""
+
+        self.candles = CandlesResource(http, base_path)
+        """OHLCV candle data"""
{oxarchive-0.4.4 → oxarchive-0.5.2}/oxarchive/resources/__init__.py

@@ -5,6 +5,8 @@ from .trades import TradesResource
 from .instruments import InstrumentsResource, LighterInstrumentsResource
 from .funding import FundingResource
 from .openinterest import OpenInterestResource
+from .candles import CandlesResource
+from .liquidations import LiquidationsResource

 __all__ = [
     "OrderBookResource",

@@ -13,4 +15,6 @@ __all__ = [
     "LighterInstrumentsResource",
     "FundingResource",
     "OpenInterestResource",
+    "CandlesResource",
+    "LiquidationsResource",
 ]
oxarchive-0.5.2/oxarchive/resources/candles.py (new file)

@@ -0,0 +1,121 @@
+"""Candles (OHLCV) API resource."""
+
+from __future__ import annotations
+
+from datetime import datetime
+from typing import Optional
+
+from ..http import HttpClient
+from ..types import Candle, CandleInterval, CursorResponse, Timestamp
+
+
+class CandlesResource:
+    """
+    Candles (OHLCV) API resource.
+
+    Example:
+        >>> # Get candle history
+        >>> result = client.candles.history("BTC", start=start, end=end, interval="1h")
+        >>> for candle in result.data:
+        ...     print(f"{candle.timestamp}: O={candle.open} H={candle.high} L={candle.low} C={candle.close}")
+        >>>
+        >>> # Paginate through large datasets
+        >>> all_candles = result.data
+        >>> while result.next_cursor:
+        ...     result = client.candles.history("BTC", start=start, end=end, cursor=result.next_cursor)
+        ...     all_candles.extend(result.data)
+    """
+
+    def __init__(self, http: HttpClient, base_path: str = "/v1"):
+        self._http = http
+        self._base_path = base_path
+
+    def _convert_timestamp(self, ts: Optional[Timestamp]) -> Optional[int]:
+        """Convert timestamp to Unix milliseconds."""
+        if ts is None:
+            return None
+        if isinstance(ts, int):
+            return ts
+        if isinstance(ts, datetime):
+            return int(ts.timestamp() * 1000)
+        if isinstance(ts, str):
+            try:
+                dt = datetime.fromisoformat(ts.replace("Z", "+00:00"))
+                return int(dt.timestamp() * 1000)
+            except ValueError:
+                return int(ts)
+        return None
+
+    def history(
+        self,
+        coin: str,
+        *,
+        start: Timestamp,
+        end: Timestamp,
+        interval: Optional[CandleInterval] = None,
+        cursor: Optional[Timestamp] = None,
+        limit: Optional[int] = None,
+    ) -> CursorResponse[list[Candle]]:
+        """
+        Get historical OHLCV candle data with cursor-based pagination.
+
+        Args:
+            coin: The coin symbol (e.g., 'BTC', 'ETH')
+            start: Start timestamp (required)
+            end: End timestamp (required)
+            interval: Candle interval (1m, 5m, 15m, 30m, 1h, 4h, 1d, 1w). Default: 1h
+            cursor: Cursor from previous response's next_cursor
+            limit: Maximum number of results (default: 100, max: 1000)
+
+        Returns:
+            CursorResponse with candle records and next_cursor for pagination
+
+        Example:
+            >>> result = client.candles.history("BTC", start=start, end=end, interval="1h", limit=1000)
+            >>> candles = result.data
+            >>> while result.next_cursor:
+            ...     result = client.candles.history(
+            ...         "BTC", start=start, end=end, interval="1h", cursor=result.next_cursor, limit=1000
+            ...     )
+            ...     candles.extend(result.data)
+        """
+        data = self._http.get(
+            f"{self._base_path}/candles/{coin.upper()}",
+            params={
+                "start": self._convert_timestamp(start),
+                "end": self._convert_timestamp(end),
+                "interval": interval,
+                "cursor": self._convert_timestamp(cursor),
+                "limit": limit,
+            },
+        )
+        return CursorResponse(
+            data=[Candle.model_validate(item) for item in data["data"]],
+            next_cursor=data.get("meta", {}).get("next_cursor"),
+        )
+
+    async def ahistory(
+        self,
+        coin: str,
+        *,
+        start: Timestamp,
+        end: Timestamp,
+        interval: Optional[CandleInterval] = None,
+        cursor: Optional[Timestamp] = None,
+        limit: Optional[int] = None,
+    ) -> CursorResponse[list[Candle]]:
+        """Async version of history(). start and end are required."""
+        data = await self._http.aget(
+            f"{self._base_path}/candles/{coin.upper()}",
+            params={
+                "start": self._convert_timestamp(start),
+                "end": self._convert_timestamp(end),
+                "interval": interval,
+                "cursor": self._convert_timestamp(cursor),
+                "limit": limit,
+            },
+        )
+        return CursorResponse(
+            data=[Candle.model_validate(item) for item in data["data"]],
+            next_cursor=data.get("meta", {}).get("next_cursor"),
+        )
oxarchive-0.5.2/oxarchive/resources/liquidations.py (new file)

@@ -0,0 +1,198 @@
+"""Liquidations API resource."""
+
+from __future__ import annotations
+
+from datetime import datetime
+from typing import Optional
+
+from ..http import HttpClient
+from ..types import CursorResponse, Liquidation, Timestamp
+
+
+class LiquidationsResource:
+    """
+    Liquidations API resource.
+
+    Retrieve historical liquidation events from Hyperliquid.
+
+    Note: Liquidation data is available from May 25, 2025 onwards.
+
+    Example:
+        >>> # Get recent liquidations
+        >>> liquidations = client.hyperliquid.liquidations.history(
+        ...     "BTC",
+        ...     start="2025-06-01",
+        ...     end="2025-06-02"
+        ... )
+        >>>
+        >>> # Get liquidations for a specific user
+        >>> user_liquidations = client.hyperliquid.liquidations.by_user(
+        ...     "0x1234...",
+        ...     start="2025-06-01",
+        ...     end="2025-06-02"
+        ... )
+    """
+
+    def __init__(self, http: HttpClient, base_path: str = "/v1"):
+        self._http = http
+        self._base_path = base_path
+
+    def _convert_timestamp(self, ts: Optional[Timestamp]) -> Optional[int]:
+        """Convert timestamp to Unix milliseconds."""
+        if ts is None:
+            return None
+        if isinstance(ts, int):
+            return ts
+        if isinstance(ts, datetime):
+            return int(ts.timestamp() * 1000)
+        if isinstance(ts, str):
+            try:
+                dt = datetime.fromisoformat(ts.replace("Z", "+00:00"))
+                return int(dt.timestamp() * 1000)
+            except ValueError:
+                return int(ts)
+        return None
+
+    def history(
+        self,
+        coin: str,
+        *,
+        start: Timestamp,
+        end: Timestamp,
+        cursor: Optional[str] = None,
+        limit: Optional[int] = None,
+    ) -> CursorResponse[list[Liquidation]]:
+        """
+        Get liquidation history for a coin with cursor-based pagination.
+
+        Args:
+            coin: The coin symbol (e.g., 'BTC', 'ETH')
+            start: Start timestamp (required)
+            end: End timestamp (required)
+            cursor: Cursor from previous response's next_cursor
+            limit: Maximum number of results (default: 100, max: 1000)
+
+        Returns:
+            CursorResponse with liquidation records and next_cursor for pagination
+
+        Example:
+            >>> result = client.hyperliquid.liquidations.history("BTC", start=start, end=end, limit=1000)
+            >>> liquidations = result.data
+            >>> while result.next_cursor:
+            ...     result = client.hyperliquid.liquidations.history(
+            ...         "BTC", start=start, end=end, cursor=result.next_cursor, limit=1000
+            ...     )
+            ...     liquidations.extend(result.data)
+        """
+        data = self._http.get(
+            f"{self._base_path}/liquidations/{coin.upper()}",
+            params={
+                "start": self._convert_timestamp(start),
+                "end": self._convert_timestamp(end),
+                "cursor": cursor,
+                "limit": limit,
+            },
+        )
+        return CursorResponse(
+            data=[Liquidation.model_validate(item) for item in data["data"]],
+            next_cursor=data.get("meta", {}).get("next_cursor"),
+        )
+
+    async def ahistory(
+        self,
+        coin: str,
+        *,
+        start: Timestamp,
+        end: Timestamp,
+        cursor: Optional[str] = None,
+        limit: Optional[int] = None,
+    ) -> CursorResponse[list[Liquidation]]:
+        """Async version of history(). start and end are required."""
+        data = await self._http.aget(
+            f"{self._base_path}/liquidations/{coin.upper()}",
+            params={
+                "start": self._convert_timestamp(start),
+                "end": self._convert_timestamp(end),
+                "cursor": cursor,
+                "limit": limit,
+            },
+        )
+        return CursorResponse(
+            data=[Liquidation.model_validate(item) for item in data["data"]],
+            next_cursor=data.get("meta", {}).get("next_cursor"),
+        )
+
+    def by_user(
+        self,
+        user_address: str,
+        *,
+        start: Timestamp,
+        end: Timestamp,
+        coin: Optional[str] = None,
+        cursor: Optional[str] = None,
+        limit: Optional[int] = None,
+    ) -> CursorResponse[list[Liquidation]]:
+        """
+        Get liquidation history for a specific user.
+
+        This returns liquidations where the user was either:
+        - The liquidated party (their position was liquidated)
+        - The liquidator (they executed the liquidation)
+
+        Args:
+            user_address: User's wallet address (e.g., '0x1234...')
+            start: Start timestamp (required)
+            end: End timestamp (required)
+            coin: Optional coin filter (e.g., 'BTC', 'ETH')
+            cursor: Cursor from previous response's next_cursor
+            limit: Maximum number of results (default: 100, max: 1000)
+
+        Returns:
+            CursorResponse with liquidation records and next_cursor for pagination
+        """
+        params = {
+            "start": self._convert_timestamp(start),
+            "end": self._convert_timestamp(end),
+            "cursor": cursor,
+            "limit": limit,
+        }
+        if coin:
+            params["coin"] = coin.upper()
+
+        data = self._http.get(
+            f"{self._base_path}/liquidations/user/{user_address}",
+            params=params,
+        )
+        return CursorResponse(
+            data=[Liquidation.model_validate(item) for item in data["data"]],
+            next_cursor=data.get("meta", {}).get("next_cursor"),
+        )
+
+    async def aby_user(
+        self,
+        user_address: str,
+        *,
+        start: Timestamp,
+        end: Timestamp,
+        coin: Optional[str] = None,
+        cursor: Optional[str] = None,
+        limit: Optional[int] = None,
+    ) -> CursorResponse[list[Liquidation]]:
+        """Async version of by_user()."""
+        params = {
+            "start": self._convert_timestamp(start),
+            "end": self._convert_timestamp(end),
+            "cursor": cursor,
+            "limit": limit,
+        }
+        if coin:
+            params["coin"] = coin.upper()
+
+        data = await self._http.aget(
+            f"{self._base_path}/liquidations/user/{user_address}",
+            params=params,
+        )
+        return CursorResponse(
+            data=[Liquidation.model_validate(item) for item in data["data"]],
+            next_cursor=data.get("meta", {}).get("next_cursor"),
+        )
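The README hunks above do not yet document the new liquidations endpoints, so here is a minimal usage sketch assembled from the docstrings in liquidations.py and the Liquidation model added to types.py below. `client` is assumed to be a configured SDK client as created in the README's setup section, and the aggregation is illustrative only.

```python
# Sketch: page through BTC liquidations for one day and tally notional per side.
# price and size are strings on the Liquidation model, so convert before multiplying.
result = client.hyperliquid.liquidations.history(
    "BTC", start="2025-06-01", end="2025-06-02", limit=1000
)
events = list(result.data)
while result.next_cursor:
    result = client.hyperliquid.liquidations.history(
        "BTC", start="2025-06-01", end="2025-06-02",
        cursor=result.next_cursor, limit=1000,
    )
    events.extend(result.data)

notional = {"B": 0.0, "S": 0.0}
for liq in events:
    notional[liq.side] += float(liq.price) * float(liq.size)
print(f"{len(events)} liquidations, notional by side: {notional}")
```

`by_user("0x1234...", start=..., end=..., coin="BTC")` follows the same pattern and also returns a `CursorResponse`.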
{oxarchive-0.4.4 → oxarchive-0.5.2}/oxarchive/types.py

@@ -270,12 +270,94 @@ class OpenInterest(BaseModel):
     """Impact ask price for liquidations."""


+# =============================================================================
+# Liquidation Types
+# =============================================================================
+
+
+class Liquidation(BaseModel):
+    """Liquidation event record."""
+
+    coin: str
+    """Trading pair symbol."""
+
+    timestamp: datetime
+    """Liquidation timestamp (UTC)."""
+
+    liquidated_user: str
+    """Address of the liquidated user."""
+
+    liquidator_user: str
+    """Address of the liquidator."""
+
+    price: str
+    """Liquidation execution price."""
+
+    size: str
+    """Liquidation size."""
+
+    side: Literal["B", "S"]
+    """Side: 'B' (buy) or 'S' (sell)."""
+
+    mark_price: Optional[str] = None
+    """Mark price at time of liquidation."""
+
+    closed_pnl: Optional[str] = None
+    """Realized PnL from the liquidation."""
+
+    direction: Optional[str] = None
+    """Position direction (e.g., 'Open Long', 'Close Short')."""
+
+    trade_id: Optional[int] = None
+    """Unique trade ID."""
+
+    tx_hash: Optional[str] = None
+    """Blockchain transaction hash."""
+
+
+# =============================================================================
+# Candle Types
+# =============================================================================
+
+
+CandleInterval = Literal["1m", "5m", "15m", "30m", "1h", "4h", "1d", "1w"]
+"""Candle interval for OHLCV data."""
+
+
+class Candle(BaseModel):
+    """OHLCV candle data."""
+
+    timestamp: datetime
+    """Candle open timestamp (UTC)."""
+
+    open: float
+    """Opening price."""
+
+    high: float
+    """Highest price during the interval."""
+
+    low: float
+    """Lowest price during the interval."""
+
+    close: float
+    """Closing price."""
+
+    volume: float
+    """Total volume traded during the interval."""
+
+    quote_volume: Optional[float] = None
+    """Total quote volume (volume * price)."""
+
+    trade_count: Optional[int] = None
+    """Number of trades during the interval."""
+
+
 # =============================================================================
 # WebSocket Types
 # =============================================================================

-WsChannel = Literal["orderbook", "trades", "ticker", "all_tickers"]
-"""Available WebSocket channels. Note: ticker/all_tickers are real-time only."""
+WsChannel = Literal["orderbook", "trades", "candles", "liquidations", "ticker", "all_tickers"]
+"""Available WebSocket channels. Note: ticker/all_tickers are real-time only. Liquidations is historical only (May 2025+)."""

 WsConnectionState = Literal["connecting", "connected", "disconnected", "reconnecting"]
 """WebSocket connection state."""

@@ -382,6 +464,41 @@ class WsHistoricalData(BaseModel):
     data: dict[str, Any]


+class OrderbookDelta(BaseModel):
+    """Orderbook delta for tick-level data."""
+
+    timestamp: int
+    """Timestamp in milliseconds."""
+
+    side: Literal["bid", "ask"]
+    """Side: 'bid' or 'ask'."""
+
+    price: float
+    """Price level."""
+
+    size: float
+    """New size (0 = level removed)."""
+
+    sequence: int
+    """Sequence number for ordering."""
+
+
+class WsHistoricalTickData(BaseModel):
+    """Historical tick data (granularity='tick' mode) - checkpoint + deltas.
+
+    This message type is sent when using granularity='tick' for Lighter.xyz
+    orderbook data. It provides a full checkpoint followed by incremental deltas.
+    """
+
+    type: Literal["historical_tick_data"]
+    channel: WsChannel
+    coin: str
+    checkpoint: dict[str, Any]
+    """Initial checkpoint (full orderbook snapshot)."""
+    deltas: list[OrderbookDelta]
+    """Incremental deltas to apply after checkpoint."""
+
+
 # =============================================================================
 # WebSocket Bulk Stream Types (Bulk Download Mode)
 # =============================================================================
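WsHistoricalTickData pairs a full checkpoint with ordered OrderbookDelta entries, and a consumer is expected to apply the deltas on top of the checkpoint. The sketch below assumes the checkpoint holds `bids`/`asks` as `[price, size]` pairs (implied by the README's `checkpoint['bids']` example, not a documented schema); the delta fields come straight from the model above.

```python
# Sketch: fold tick-mode deltas into a local book keyed by price level.
def apply_tick_data(checkpoint: dict, deltas: list) -> dict:
    book = {
        "bid": {float(p): float(s) for p, s in checkpoint.get("bids", [])},
        "ask": {float(p): float(s) for p, s in checkpoint.get("asks", [])},
    }
    # Deltas carry a sequence number; apply them in order.
    for delta in sorted(deltas, key=lambda d: d.sequence):
        levels = book[delta.side]
        if delta.size == 0:
            levels.pop(delta.price, None)  # size 0 means the level was removed
        else:
            levels[delta.price] = delta.size
    return book
```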
{oxarchive-0.4.4 → oxarchive-0.5.2}/oxarchive/websocket.py

@@ -42,6 +42,7 @@ except ImportError:

 from .types import (
     OrderBook,
+    OrderbookDelta,
     PriceLevel,
     Trade,
     WsChannel,

@@ -57,6 +58,7 @@ from .types import (
     WsReplayCompleted,
     WsReplayStopped,
     WsHistoricalData,
+    WsHistoricalTickData,
     WsStreamStarted,
     WsStreamProgress,
     WsHistoricalBatch,

@@ -110,6 +112,7 @@ ErrorHandler = Callable[[Exception], None]

 # Replay handlers
 HistoricalDataHandler = Callable[[str, int, dict], None]
+HistoricalTickDataHandler = Callable[[str, dict, list[OrderbookDelta]], None]  # coin, checkpoint, deltas
 ReplayStartHandler = Callable[[WsChannel, str, int, int, float], None]  # channel, coin, start, end, speed
 ReplayCompleteHandler = Callable[[WsChannel, str, int], None]  # channel, coin, snapshots_sent

@@ -276,6 +279,7 @@ class OxArchiveWs:

         # Replay handlers (Option B)
         self._on_historical_data: Optional[HistoricalDataHandler] = None
+        self._on_historical_tick_data: Optional[HistoricalTickDataHandler] = None
         self._on_replay_start: Optional[ReplayStartHandler] = None
         self._on_replay_complete: Optional[ReplayCompleteHandler] = None

@@ -307,7 +311,9 @@ class OxArchiveWs:
         url = f"{self.options.ws_url}?apiKey={self.options.api_key}"

         try:
-            self._ws = await ws_connect(url)
+            # Increase max_size to 50MB for large Lighter orderbook data with high granularity
+            # Lighter tick data with full depth (~3700 levels) can exceed 14MB per message
+            self._ws = await ws_connect(url, max_size=50 * 1024 * 1024)
             self._reconnect_attempts = 0
             self._set_state("connected")

@@ -427,6 +433,8 @@ class OxArchiveWs:
         start: int,
         end: Optional[int] = None,
         speed: float = 1.0,
+        granularity: Optional[str] = None,
+        interval: Optional[str] = None,
     ) -> None:
         """Start historical replay with timing preserved.

@@ -436,9 +444,12 @@ class OxArchiveWs:
             start: Start timestamp (Unix ms)
             end: End timestamp (Unix ms, defaults to now)
             speed: Playback speed multiplier (1 = real-time, 10 = 10x faster)
+            granularity: Data resolution for Lighter orderbook ('checkpoint', '30s', '10s', '1s', 'tick')
+            interval: Candle interval for candles channel ('1m', '5m', '15m', '30m', '1h', '4h', '1d', '1w')

         Example:
             >>> await ws.replay("orderbook", "BTC", start=time.time()*1000 - 86400000, speed=10)
+            >>> await ws.replay("candles", "BTC", start=..., speed=10, interval="15m")
         """
         msg = {
             "op": "replay",

@@ -449,6 +460,10 @@ class OxArchiveWs:
         }
         if end is not None:
             msg["end"] = end
+        if granularity is not None:
+            msg["granularity"] = granularity
+        if interval is not None:
+            msg["interval"] = interval
         await self._send(msg)

     async def replay_pause(self) -> None:

@@ -482,6 +497,8 @@ class OxArchiveWs:
         start: int,
         end: int,
         batch_size: int = 1000,
+        granularity: Optional[str] = None,
+        interval: Optional[str] = None,
     ) -> None:
         """Start bulk streaming for fast data download.

@@ -491,18 +508,26 @@ class OxArchiveWs:
             start: Start timestamp (Unix ms)
             end: End timestamp (Unix ms)
             batch_size: Records per batch message
+            granularity: Data resolution for Lighter orderbook ('checkpoint', '30s', '10s', '1s', 'tick')
+            interval: Candle interval for candles channel ('1m', '5m', '15m', '30m', '1h', '4h', '1d', '1w')

         Example:
             >>> await ws.stream("orderbook", "ETH", start=..., end=..., batch_size=1000)
+            >>> await ws.stream("candles", "BTC", start=..., end=..., interval="1h")
         """
-        await self._send({
+        msg = {
             "op": "stream",
             "channel": channel,
             "coin": coin,
             "start": start,
             "end": end,
             "batch_size": batch_size,
-        })
+        }
+        if granularity is not None:
+            msg["granularity"] = granularity
+        if interval is not None:
+            msg["interval"] = interval
+        await self._send(msg)

     async def stream_stop(self) -> None:
         """Stop the current bulk stream."""

@@ -547,6 +572,16 @@ class OxArchiveWs:
         """
         self._on_historical_data = handler

+    def on_historical_tick_data(self, handler: HistoricalTickDataHandler) -> None:
+        """Set handler for historical tick data (granularity='tick' mode).
+
+        This is for tick-level granularity on Lighter.xyz orderbook data.
+        Receives a checkpoint (full orderbook) followed by incremental deltas.
+
+        Handler receives: (coin, checkpoint, deltas)
+        """
+        self._on_historical_tick_data = handler
+
     def on_replay_start(self, handler: ReplayStartHandler) -> None:
         """Set handler for replay started event.

@@ -722,6 +757,10 @@ class OxArchiveWs:
         elif msg_type == "historical_data" and self._on_historical_data:
             self._on_historical_data(data["coin"], data["timestamp"], data["data"])

+        elif msg_type == "historical_tick_data" and self._on_historical_tick_data:
+            msg = WsHistoricalTickData(**data)
+            self._on_historical_tick_data(msg.coin, msg.checkpoint, msg.deltas)
+
         elif msg_type == "replay_completed" and self._on_replay_complete:
             self._on_replay_complete(data["channel"], data["coin"], data["snapshots_sent"])
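To tie the websocket changes together, the tick handler has to be registered before a `granularity="tick"` replay is started. The sketch below is built under stated assumptions: `OxArchiveWs`, `WsOptions` with an `api_key` option, `on_historical_tick_data()`, and `replay(..., granularity=...)` appear in this diff, while the `connect()` call and the sleep that keeps the connection open mirror the README's other websocket examples and are assumptions rather than confirmed API.

```python
import asyncio
import time

from oxarchive import OxArchiveWs, WsOptions


async def main():
    ws = OxArchiveWs(WsOptions(api_key="YOUR_API_KEY"))
    await ws.connect()  # assumed connect step, as in the README's websocket examples

    # Each message carries a checkpoint plus deltas (WsHistoricalTickData);
    # a real consumer would fold them into a book as in the apply_tick_data sketch above.
    ws.on_historical_tick_data(
        lambda coin, checkpoint, deltas: print(coin, len(checkpoint.get("bids", [])), len(deltas))
    )

    # Tick-level Lighter.xyz orderbook replay (Enterprise tier per the README)
    await ws.replay(
        "orderbook", "BTC",
        start=int(time.time() * 1000) - 3600000,
        speed=10,
        granularity="tick",
    )

    await asyncio.sleep(60)  # keep the connection open while replayed data streams in


asyncio.run(main())
```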