onesecondtrader 0.38.0__py3-none-any.whl → 0.40.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,26 +1,647 @@
1
1
  from __future__ import annotations
2
2
 
3
+ import json
3
4
  import pathlib
4
5
  import sqlite3
6
+ import tempfile
7
+ import zipfile
5
8
 
9
+ import databento
10
+ from tqdm import tqdm
6
11
 
7
- def init_secmaster(db_path: pathlib.Path) -> None:
8
- """Initialize a new secmaster database at the specified path.
9
12
 
10
- Creates the database file with the schema defined in schema.sql (publishers, instruments,
11
- and ohlcv tables) but does not populate any data.
13
# Number of rows buffered before each executemany() flush during bulk inserts.
BATCH_SIZE = 10000
14
+
15
+
16
def create_secmaster_db(db_path: pathlib.Path) -> pathlib.Path:
    """
    Initialize a new secmaster database at the specified path.

    Creates the database file with the schema defined in ./schema.sql but does not
    populate any data.

    Args:
        db_path: Path where the database file will be created.

    Returns:
        The path where the database was created.

    Raises:
        FileExistsError: If a database already exists at the path.
    """
    if db_path.exists():
        raise FileExistsError(f"Database already exists: {db_path}")
    db_path.parent.mkdir(parents=True, exist_ok=True)

    # Read the schema before opening the connection so a missing or unreadable
    # schema.sql cannot leave a dangling open connection behind.
    schema_path = pathlib.Path(__file__).parent / "schema.sql"
    schema_sql = schema_path.read_text()

    connection = sqlite3.connect(str(db_path))
    try:
        connection.executescript(schema_sql)
        connection.commit()
    finally:
        # Always release the handle, even if the schema script fails.
        connection.close()
    return db_path
45
+
46
+
47
def ingest_symbology(json_path: pathlib.Path, db_path: pathlib.Path) -> int:
    """
    Ingest symbology mappings from a Databento symbology.json file into the database.

    Opens the database with fast bulk-loading PRAGMAs, delegates the actual parsing
    and insertion to the connection-level helper, and restores safe settings before
    closing.

    Args:
        json_path: Path to the symbology.json file.
        db_path: Path to the secmaster SQLite database.

    Returns:
        The number of symbology records inserted.

    Raises:
        FileNotFoundError: If the JSON file does not exist.
        sqlite3.Error: If a database error occurs during ingestion.
    """
    conn = sqlite3.connect(str(db_path))
    _enable_bulk_loading(conn)
    try:
        inserted = _ingest_symbology_with_connection(json_path, conn)
        conn.commit()
        return inserted
    finally:
        _disable_bulk_loading(conn)
        conn.close()
74
+
75
+
76
def _ingest_symbology_with_connection(
    json_path: pathlib.Path, connection: sqlite3.Connection
) -> int:
    """
    Ingest symbology using an existing connection. Uses batch inserts for performance.

    Args:
        json_path: Path to the symbology.json file.
        connection: Open SQLite connection; the caller is responsible for
            commit/close.

    Returns:
        The number of symbology records inserted.
    """
    # Single source of truth for the insert statement — previously this SQL
    # literal was duplicated for the in-loop flush and the final flush.
    sql = (
        "INSERT OR REPLACE INTO symbology "
        "(symbol, instrument_id, start_date, end_date) "
        "VALUES (?, ?, ?, ?)"
    )

    with open(json_path, "r") as f:
        data = json.load(f)

    result = data.get("result", {})
    cursor = connection.cursor()

    batch = []
    count = 0
    for symbol, mappings in result.items():
        for mapping in mappings:
            # "s" is the instrument id (string in the JSON), "d0"/"d1" the
            # validity date range for this symbol mapping.
            batch.append((symbol, int(mapping["s"]), mapping["d0"], mapping["d1"]))
            count += 1
            if len(batch) >= BATCH_SIZE:
                cursor.executemany(sql, batch)
                batch.clear()

    # Flush the final partial batch.
    if batch:
        cursor.executemany(sql, batch)

    return count
112
+
113
+
114
+ def _enable_bulk_loading(connection: sqlite3.Connection) -> None:
115
+ """
116
+ Configure SQLite for fast bulk loading.
117
+
118
+ Disables safety features that slow down bulk inserts. The tradeoff is that if the
119
+ process crashes or power fails during import, the database may be corrupted and
120
+ need to be recreated. This is acceptable for bulk imports where the source data
121
+ is preserved and the import can be re-run.
122
+
123
+ Settings:
124
+ - synchronous=OFF: Don't wait for disk writes to complete
125
+ - journal_mode=OFF: Disable rollback journal (no crash recovery)
126
+ - cache_size=-64000: Use 64MB of memory for cache (negative = KB)
127
+ """
128
+ connection.execute("PRAGMA synchronous = OFF")
129
+ connection.execute("PRAGMA journal_mode = OFF")
130
+ connection.execute("PRAGMA cache_size = -64000")
131
+
132
+
133
+ def _disable_bulk_loading(connection: sqlite3.Connection) -> None:
134
+ """
135
+ Restore SQLite to safe default settings after bulk loading.
136
+ """
137
+ connection.execute("PRAGMA synchronous = FULL")
138
+ connection.execute("PRAGMA journal_mode = DELETE")
139
+ connection.execute("PRAGMA cache_size = -2000")
140
+
141
+
142
def ingest_dbzip(zip_path: pathlib.Path, db_path: pathlib.Path) -> tuple[int, int]:
    """
    Ingest market data from a Databento zip archive into the secmaster database.

    Every `.dbn` / `.dbn.zst` member of the archive is extracted to a temporary
    directory and ingested. A `symbology.json` member, when present, is ingested
    as well.

    Bulk-loading PRAGMAs are enabled for speed; they disable crash recovery, so
    an interrupted import may leave the database corrupted and in need of being
    recreated. The source zip file is never modified.

    Args:
        zip_path: Path to the zip archive containing DBN files.
        db_path: Path to the secmaster SQLite database.

    Returns:
        A tuple of (dbn_record_count, symbology_record_count).
    """
    total_dbn = 0
    total_symbology = 0

    conn = sqlite3.connect(str(db_path))
    _enable_bulk_loading(conn)
    try:
        with zipfile.ZipFile(zip_path, "r") as archive:
            members = archive.namelist()
            dbn_members = [m for m in members if m.endswith((".dbn.zst", ".dbn"))]
            with tempfile.TemporaryDirectory() as scratch:
                for member in tqdm(dbn_members, desc="Ingesting DBN files", unit="file"):
                    archive.extract(member, scratch)
                    total_dbn += _ingest_dbn_with_connection(
                        pathlib.Path(scratch) / member, conn
                    )

                if "symbology.json" in members:
                    archive.extract("symbology.json", scratch)
                    total_symbology = _ingest_symbology_with_connection(
                        pathlib.Path(scratch) / "symbology.json", conn
                    )

        conn.commit()
    finally:
        _disable_bulk_loading(conn)
        conn.close()

    return total_dbn, total_symbology
191
+
192
+
193
def ingest_dbn(dbn_path: pathlib.Path, db_path: pathlib.Path) -> int:
    """
    Ingest market data from a Databento Binary Encoding (DBN) file into the
    secmaster database.

    Records are routed to the appropriate tables by their record type (rtype).
    Both uncompressed `.dbn` files and zstd-compressed `.dbn.zst` files are
    supported.

    Args:
        dbn_path: Path to the DBN file to ingest.
        db_path: Path to the secmaster SQLite database.

    Returns:
        The number of records successfully ingested.

    Raises:
        FileNotFoundError: If the DBN file does not exist.
        sqlite3.Error: If a database error occurs during ingestion.
    """
    conn = sqlite3.connect(str(db_path))
    _enable_bulk_loading(conn)
    try:
        ingested = _ingest_dbn_with_connection(dbn_path, conn)
        conn.commit()
        return ingested
    finally:
        _disable_bulk_loading(conn)
        conn.close()
221
+
222
+
223
def _ingest_dbn_with_connection(
    dbn_path: pathlib.Path, connection: sqlite3.Connection
) -> int:
    """
    Ingest DBN file using an existing connection. Uses batch inserts for performance.
    """
    store = databento.DBNStore.from_file(dbn_path)
    cursor = connection.cursor()

    # (record class, batch key, row converter), checked in order per record —
    # the first matching class wins, mirroring structural-pattern dispatch.
    dispatch = (
        (databento.OHLCVMsg, "ohlcv", _ohlcv_to_tuple),
        (databento.TradeMsg, "trades", _trade_to_tuple),
        (databento.MBP1Msg, "quotes", _quote_to_tuple),
        (databento.BBOMsg, "bbo", _bbo_to_tuple),
        (databento.MBOMsg, "mbo", _mbo_to_tuple),
        (databento.MBP10Msg, "mbp10", _mbp10_to_tuple),
        (databento.ImbalanceMsg, "imbalance", _imbalance_to_tuple),
        (databento.StatMsg, "statistics", _statistics_to_tuple),
        (databento.StatusMsg, "status", _status_to_tuple),
        (databento.InstrumentDefMsg, "instruments", _instrument_to_tuple),
    )
    batches: dict[str, list[tuple]] = {key: [] for _, key, _ in dispatch}

    count = 0
    for record in store:
        for msg_type, key, to_row in dispatch:
            if isinstance(record, msg_type):
                batches[key].append(to_row(record))
                break
        # Every record counts toward the flush cadence, matched or not.
        count += 1

        if count % BATCH_SIZE == 0:
            _flush_batches(cursor, batches)

    # Flush whatever remains after the final partial batch.
    _flush_batches(cursor, batches)
    return count
275
+
276
+
277
+ def _flush_batches(cursor: sqlite3.Cursor, batches: dict) -> None:
278
+ """Flush all non-empty batches to the database using executemany."""
279
+ if batches["ohlcv"]:
280
+ cursor.executemany(
281
+ "INSERT OR REPLACE INTO ohlcv "
282
+ "(instrument_id, rtype, ts_event, open, high, low, close, volume) "
283
+ "VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
284
+ batches["ohlcv"],
285
+ )
286
+ batches["ohlcv"].clear()
287
+
288
+ if batches["trades"]:
289
+ cursor.executemany(
290
+ "INSERT OR REPLACE INTO trades "
291
+ "(instrument_id, ts_event, ts_recv, price, size, action, side, flags, depth, ts_in_delta, sequence) "
292
+ "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
293
+ batches["trades"],
294
+ )
295
+ batches["trades"].clear()
296
+
297
+ if batches["quotes"]:
298
+ cursor.executemany(
299
+ "INSERT OR REPLACE INTO quotes "
300
+ "(instrument_id, ts_event, ts_recv, price, size, action, side, flags, depth, ts_in_delta, sequence, "
301
+ "bid_px, ask_px, bid_sz, ask_sz, bid_ct, ask_ct) "
302
+ "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
303
+ batches["quotes"],
304
+ )
305
+ batches["quotes"].clear()
306
+
307
+ if batches["bbo"]:
308
+ cursor.executemany(
309
+ "INSERT OR REPLACE INTO bbo "
310
+ "(instrument_id, rtype, ts_event, ts_recv, price, size, side, flags, sequence, "
311
+ "bid_px, ask_px, bid_sz, ask_sz, bid_ct, ask_ct) "
312
+ "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
313
+ batches["bbo"],
314
+ )
315
+ batches["bbo"].clear()
316
+
317
+ if batches["mbo"]:
318
+ cursor.executemany(
319
+ "INSERT OR REPLACE INTO mbo "
320
+ "(instrument_id, ts_event, ts_recv, order_id, price, size, flags, channel_id, action, side, ts_in_delta, sequence) "
321
+ "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
322
+ batches["mbo"],
323
+ )
324
+ batches["mbo"].clear()
325
+
326
+ if batches["mbp10"]:
327
+ cursor.executemany(
328
+ "INSERT OR REPLACE INTO mbp10 "
329
+ "(instrument_id, ts_event, ts_recv, price, size, action, side, flags, depth, ts_in_delta, sequence, "
330
+ "bid_px_00, bid_px_01, bid_px_02, bid_px_03, bid_px_04, bid_px_05, bid_px_06, bid_px_07, bid_px_08, bid_px_09, "
331
+ "ask_px_00, ask_px_01, ask_px_02, ask_px_03, ask_px_04, ask_px_05, ask_px_06, ask_px_07, ask_px_08, ask_px_09, "
332
+ "bid_sz_00, bid_sz_01, bid_sz_02, bid_sz_03, bid_sz_04, bid_sz_05, bid_sz_06, bid_sz_07, bid_sz_08, bid_sz_09, "
333
+ "ask_sz_00, ask_sz_01, ask_sz_02, ask_sz_03, ask_sz_04, ask_sz_05, ask_sz_06, ask_sz_07, ask_sz_08, ask_sz_09, "
334
+ "bid_ct_00, bid_ct_01, bid_ct_02, bid_ct_03, bid_ct_04, bid_ct_05, bid_ct_06, bid_ct_07, bid_ct_08, bid_ct_09, "
335
+ "ask_ct_00, ask_ct_01, ask_ct_02, ask_ct_03, ask_ct_04, ask_ct_05, ask_ct_06, ask_ct_07, ask_ct_08, ask_ct_09) "
336
+ "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, "
337
+ "?, ?, ?, ?, ?, ?, ?, ?, ?, ?, "
338
+ "?, ?, ?, ?, ?, ?, ?, ?, ?, ?, "
339
+ "?, ?, ?, ?, ?, ?, ?, ?, ?, ?, "
340
+ "?, ?, ?, ?, ?, ?, ?, ?, ?, ?, "
341
+ "?, ?, ?, ?, ?, ?, ?, ?, ?, ?, "
342
+ "?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
343
+ batches["mbp10"],
344
+ )
345
+ batches["mbp10"].clear()
346
+
347
+ if batches["imbalance"]:
348
+ cursor.executemany(
349
+ "INSERT OR REPLACE INTO imbalance "
350
+ "(instrument_id, ts_event, ts_recv, ref_price, auction_time, cont_book_clr_price, auct_interest_clr_price, "
351
+ "ssr_filling_price, ind_match_price, upper_collar, lower_collar, paired_qty, total_imbalance_qty, "
352
+ "market_imbalance_qty, unpaired_qty, auction_type, side, auction_status, freeze_status, num_extensions, "
353
+ "unpaired_side, significant_imbalance) "
354
+ "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
355
+ batches["imbalance"],
356
+ )
357
+ batches["imbalance"].clear()
358
+
359
+ if batches["statistics"]:
360
+ cursor.executemany(
361
+ "INSERT OR REPLACE INTO statistics "
362
+ "(instrument_id, ts_event, ts_recv, ts_ref, price, quantity, sequence, ts_in_delta, stat_type, channel_id, "
363
+ "update_action, stat_flags) "
364
+ "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
365
+ batches["statistics"],
366
+ )
367
+ batches["statistics"].clear()
368
+
369
+ if batches["status"]:
370
+ cursor.executemany(
371
+ "INSERT OR REPLACE INTO status "
372
+ "(instrument_id, ts_event, ts_recv, action, reason, trading_event, is_trading, is_quoting, is_short_sell_restricted) "
373
+ "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)",
374
+ batches["status"],
375
+ )
376
+ batches["status"].clear()
377
+
378
+ if batches["instruments"]:
379
+ cursor.executemany(
380
+ "INSERT OR REPLACE INTO instruments "
381
+ "(publisher_id, raw_instrument_id, raw_symbol, instrument_class, security_type, asset, cfi, exchange, currency, "
382
+ "strike_price, strike_price_currency, expiration, activation, maturity_year, maturity_month, maturity_day, "
383
+ "contract_multiplier, unit_of_measure, unit_of_measure_qty, underlying, display_factor, high_limit_price, "
384
+ "low_limit_price, min_price_increment, security_group, ts_recv) "
385
+ "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
386
+ batches["instruments"],
387
+ )
388
+ batches["instruments"].clear()
389
+
390
+
391
+ def _ohlcv_to_tuple(record: databento.OHLCVMsg) -> tuple:
392
+ return (
393
+ record.instrument_id,
394
+ record.rtype.value,
395
+ record.ts_event,
396
+ record.open,
397
+ record.high,
398
+ record.low,
399
+ record.close,
400
+ record.volume,
401
+ )
402
+
403
+
404
+ def _trade_to_tuple(record: databento.TradeMsg) -> tuple:
405
+ return (
406
+ record.instrument_id,
407
+ record.ts_event,
408
+ record.ts_recv,
409
+ record.price,
410
+ record.size,
411
+ str(record.action),
412
+ str(record.side),
413
+ record.flags,
414
+ record.depth,
415
+ record.ts_in_delta,
416
+ record.sequence,
417
+ )
418
+
419
+
420
+ def _quote_to_tuple(record: databento.MBP1Msg) -> tuple:
421
+ return (
422
+ record.instrument_id,
423
+ record.ts_event,
424
+ record.ts_recv,
425
+ record.price,
426
+ record.size,
427
+ str(record.action),
428
+ str(record.side),
429
+ record.flags,
430
+ record.depth,
431
+ record.ts_in_delta,
432
+ record.sequence,
433
+ record.levels[0].bid_px,
434
+ record.levels[0].ask_px,
435
+ record.levels[0].bid_sz,
436
+ record.levels[0].ask_sz,
437
+ record.levels[0].bid_ct,
438
+ record.levels[0].ask_ct,
439
+ )
440
+
441
+
442
+ def _bbo_to_tuple(record: databento.BBOMsg) -> tuple:
443
+ return (
444
+ record.instrument_id,
445
+ record.rtype.value,
446
+ record.ts_event,
447
+ record.ts_recv,
448
+ record.price,
449
+ record.size,
450
+ str(record.side),
451
+ record.flags,
452
+ record.sequence,
453
+ record.levels[0].bid_px,
454
+ record.levels[0].ask_px,
455
+ record.levels[0].bid_sz,
456
+ record.levels[0].ask_sz,
457
+ record.levels[0].bid_ct,
458
+ record.levels[0].ask_ct,
459
+ )
460
+
461
+
462
+ def _mbo_to_tuple(record: databento.MBOMsg) -> tuple:
463
+ return (
464
+ record.instrument_id,
465
+ record.ts_event,
466
+ record.ts_recv,
467
+ record.order_id,
468
+ record.price,
469
+ record.size,
470
+ record.flags,
471
+ record.channel_id,
472
+ str(record.action),
473
+ str(record.side),
474
+ record.ts_in_delta,
475
+ record.sequence,
476
+ )
477
+
478
+
479
+ def _mbp10_to_tuple(record: databento.MBP10Msg) -> tuple:
480
+ levels = record.levels
481
+ return (
482
+ record.instrument_id,
483
+ record.ts_event,
484
+ record.ts_recv,
485
+ record.price,
486
+ record.size,
487
+ str(record.action),
488
+ str(record.side),
489
+ record.flags,
490
+ record.depth,
491
+ record.ts_in_delta,
492
+ record.sequence,
493
+ levels[0].bid_px,
494
+ levels[1].bid_px,
495
+ levels[2].bid_px,
496
+ levels[3].bid_px,
497
+ levels[4].bid_px,
498
+ levels[5].bid_px,
499
+ levels[6].bid_px,
500
+ levels[7].bid_px,
501
+ levels[8].bid_px,
502
+ levels[9].bid_px,
503
+ levels[0].ask_px,
504
+ levels[1].ask_px,
505
+ levels[2].ask_px,
506
+ levels[3].ask_px,
507
+ levels[4].ask_px,
508
+ levels[5].ask_px,
509
+ levels[6].ask_px,
510
+ levels[7].ask_px,
511
+ levels[8].ask_px,
512
+ levels[9].ask_px,
513
+ levels[0].bid_sz,
514
+ levels[1].bid_sz,
515
+ levels[2].bid_sz,
516
+ levels[3].bid_sz,
517
+ levels[4].bid_sz,
518
+ levels[5].bid_sz,
519
+ levels[6].bid_sz,
520
+ levels[7].bid_sz,
521
+ levels[8].bid_sz,
522
+ levels[9].bid_sz,
523
+ levels[0].ask_sz,
524
+ levels[1].ask_sz,
525
+ levels[2].ask_sz,
526
+ levels[3].ask_sz,
527
+ levels[4].ask_sz,
528
+ levels[5].ask_sz,
529
+ levels[6].ask_sz,
530
+ levels[7].ask_sz,
531
+ levels[8].ask_sz,
532
+ levels[9].ask_sz,
533
+ levels[0].bid_ct,
534
+ levels[1].bid_ct,
535
+ levels[2].bid_ct,
536
+ levels[3].bid_ct,
537
+ levels[4].bid_ct,
538
+ levels[5].bid_ct,
539
+ levels[6].bid_ct,
540
+ levels[7].bid_ct,
541
+ levels[8].bid_ct,
542
+ levels[9].bid_ct,
543
+ levels[0].ask_ct,
544
+ levels[1].ask_ct,
545
+ levels[2].ask_ct,
546
+ levels[3].ask_ct,
547
+ levels[4].ask_ct,
548
+ levels[5].ask_ct,
549
+ levels[6].ask_ct,
550
+ levels[7].ask_ct,
551
+ levels[8].ask_ct,
552
+ levels[9].ask_ct,
553
+ )
554
+
555
+
556
+ def _imbalance_to_tuple(record: databento.ImbalanceMsg) -> tuple:
557
+ return (
558
+ record.instrument_id,
559
+ record.ts_event,
560
+ record.ts_recv,
561
+ record.ref_price,
562
+ record.auction_time,
563
+ record.cont_book_clr_price,
564
+ record.auct_interest_clr_price,
565
+ record.ssr_filling_price,
566
+ record.ind_match_price,
567
+ record.upper_collar,
568
+ record.lower_collar,
569
+ record.paired_qty,
570
+ record.total_imbalance_qty,
571
+ record.market_imbalance_qty,
572
+ record.unpaired_qty,
573
+ str(record.auction_type),
574
+ str(record.side),
575
+ record.auction_status,
576
+ record.freeze_status,
577
+ record.num_extensions,
578
+ str(record.unpaired_side),
579
+ str(record.significant_imbalance),
580
+ )
581
+
582
+
583
+ def _statistics_to_tuple(record: databento.StatMsg) -> tuple:
584
+ stat_type = record.stat_type
585
+ update_action = record.update_action
586
+ return (
587
+ record.instrument_id,
588
+ record.ts_event,
589
+ record.ts_recv,
590
+ record.ts_ref,
591
+ record.price,
592
+ record.quantity,
593
+ record.sequence,
594
+ record.ts_in_delta,
595
+ stat_type.value if hasattr(stat_type, "value") else stat_type,
596
+ record.channel_id,
597
+ update_action.value if hasattr(update_action, "value") else update_action,
598
+ record.stat_flags,
599
+ )
600
+
601
+
602
+ def _status_to_tuple(record: databento.StatusMsg) -> tuple:
603
+ action = record.action
604
+ reason = record.reason
605
+ trading_event = record.trading_event
606
+ return (
607
+ record.instrument_id,
608
+ record.ts_event,
609
+ record.ts_recv,
610
+ action.value if hasattr(action, "value") else action,
611
+ reason.value if hasattr(reason, "value") else reason,
612
+ trading_event.value if hasattr(trading_event, "value") else trading_event,
613
+ str(record.is_trading),
614
+ str(record.is_quoting),
615
+ str(record.is_short_sell_restricted),
616
+ )
617
+
618
+
619
+ def _instrument_to_tuple(record: databento.InstrumentDefMsg) -> tuple:
620
+ return (
621
+ record.publisher_id,
622
+ record.instrument_id,
623
+ record.raw_symbol,
624
+ str(record.instrument_class),
625
+ record.security_type,
626
+ record.asset,
627
+ record.cfi,
628
+ record.exchange,
629
+ record.currency,
630
+ record.strike_price,
631
+ record.strike_price_currency,
632
+ record.expiration,
633
+ record.activation,
634
+ record.maturity_year,
635
+ record.maturity_month,
636
+ record.maturity_day,
637
+ record.contract_multiplier,
638
+ record.unit_of_measure,
639
+ record.unit_of_measure_qty,
640
+ record.underlying,
641
+ record.display_factor,
642
+ record.high_limit_price,
643
+ record.low_limit_price,
644
+ record.min_price_increment,
645
+ record.group,
646
+ record.ts_recv,
647
+ )
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: onesecondtrader
3
- Version: 0.38.0
3
+ Version: 0.40.0
4
4
  Summary: The Trading Infrastructure Toolkit for Python. Research, simulate, and deploy algorithmic trading strategies — all in one place.
5
5
  License-File: LICENSE
6
6
  Author: Nils P. Kujath
@@ -11,9 +11,13 @@ Classifier: Programming Language :: Python :: 3.11
11
11
  Classifier: Programming Language :: Python :: 3.12
12
12
  Classifier: Programming Language :: Python :: 3.13
13
13
  Classifier: Programming Language :: Python :: 3.14
14
+ Requires-Dist: databento (>=0.69.0,<0.70.0)
15
+ Requires-Dist: ib-async (>=2.1.0,<3.0.0)
14
16
  Requires-Dist: matplotlib (>=3.10.7,<4.0.0)
17
+ Requires-Dist: mplfinance (>=0.12.10b0,<0.13.0)
15
18
  Requires-Dist: pandas (>=2.3.1,<3.0.0)
16
19
  Requires-Dist: python-dotenv (>=1.0.0,<2.0.0)
20
+ Requires-Dist: tqdm (>=4.67.1,<5.0.0)
17
21
  Description-Content-Type: text/markdown
18
22
 
19
23
  # OneSecondTrader