onesecondtrader 0.41.0__py3-none-any.whl → 0.44.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. onesecondtrader/__init__.py +0 -58
  2. onesecondtrader/models/__init__.py +11 -0
  3. onesecondtrader/models/bar_fields.py +23 -0
  4. onesecondtrader/models/bar_period.py +21 -0
  5. onesecondtrader/models/order_types.py +21 -0
  6. onesecondtrader/models/trade_sides.py +20 -0
  7. {onesecondtrader-0.41.0.dist-info → onesecondtrader-0.44.0.dist-info}/METADATA +2 -2
  8. onesecondtrader-0.44.0.dist-info/RECORD +10 -0
  9. onesecondtrader/connectors/__init__.py +0 -3
  10. onesecondtrader/connectors/brokers/__init__.py +0 -4
  11. onesecondtrader/connectors/brokers/ib.py +0 -418
  12. onesecondtrader/connectors/brokers/simulated.py +0 -349
  13. onesecondtrader/connectors/datafeeds/__init__.py +0 -4
  14. onesecondtrader/connectors/datafeeds/ib.py +0 -286
  15. onesecondtrader/connectors/datafeeds/simulated.py +0 -167
  16. onesecondtrader/connectors/gateways/__init__.py +0 -3
  17. onesecondtrader/connectors/gateways/ib.py +0 -314
  18. onesecondtrader/core/__init__.py +0 -7
  19. onesecondtrader/core/brokers/__init__.py +0 -3
  20. onesecondtrader/core/brokers/base.py +0 -46
  21. onesecondtrader/core/datafeeds/__init__.py +0 -3
  22. onesecondtrader/core/datafeeds/base.py +0 -32
  23. onesecondtrader/core/events/__init__.py +0 -33
  24. onesecondtrader/core/events/bases.py +0 -29
  25. onesecondtrader/core/events/market.py +0 -22
  26. onesecondtrader/core/events/requests.py +0 -31
  27. onesecondtrader/core/events/responses.py +0 -54
  28. onesecondtrader/core/indicators/__init__.py +0 -13
  29. onesecondtrader/core/indicators/averages.py +0 -56
  30. onesecondtrader/core/indicators/bar.py +0 -47
  31. onesecondtrader/core/indicators/base.py +0 -60
  32. onesecondtrader/core/messaging/__init__.py +0 -7
  33. onesecondtrader/core/messaging/eventbus.py +0 -47
  34. onesecondtrader/core/messaging/subscriber.py +0 -69
  35. onesecondtrader/core/models/__init__.py +0 -14
  36. onesecondtrader/core/models/data.py +0 -18
  37. onesecondtrader/core/models/orders.py +0 -15
  38. onesecondtrader/core/models/params.py +0 -21
  39. onesecondtrader/core/models/records.py +0 -32
  40. onesecondtrader/core/strategies/__init__.py +0 -7
  41. onesecondtrader/core/strategies/base.py +0 -324
  42. onesecondtrader/core/strategies/examples.py +0 -43
  43. onesecondtrader/dashboard/__init__.py +0 -3
  44. onesecondtrader/dashboard/app.py +0 -1677
  45. onesecondtrader/dashboard/registry.py +0 -100
  46. onesecondtrader/orchestrator/__init__.py +0 -7
  47. onesecondtrader/orchestrator/orchestrator.py +0 -105
  48. onesecondtrader/orchestrator/recorder.py +0 -196
  49. onesecondtrader/orchestrator/schema.sql +0 -208
  50. onesecondtrader/secmaster/__init__.py +0 -6
  51. onesecondtrader/secmaster/schema.sql +0 -740
  52. onesecondtrader/secmaster/utils.py +0 -737
  53. onesecondtrader-0.41.0.dist-info/RECORD +0 -49
  54. {onesecondtrader-0.41.0.dist-info → onesecondtrader-0.44.0.dist-info}/WHEEL +0 -0
  55. {onesecondtrader-0.41.0.dist-info → onesecondtrader-0.44.0.dist-info}/licenses/LICENSE +0 -0
@@ -1,737 +0,0 @@
1
- from __future__ import annotations
2
-
3
- import json
4
- import pathlib
5
- import sqlite3
6
- import tempfile
7
- import zipfile
8
-
9
- import databento
10
- from tqdm import tqdm
11
-
12
-
13
- BATCH_SIZE = 10000
14
-
15
-
16
def create_secmaster_db(db_path: pathlib.Path) -> pathlib.Path:
    """
    Initialize a new secmaster database at the specified path.

    Creates the database file with the schema defined in ./schema.sql but does
    not populate any data.

    Args:
        db_path: Path where the database file will be created.

    Returns:
        The path where the database was created.

    Raises:
        FileExistsError: If a database already exists at the path.
    """
    if db_path.exists():
        raise FileExistsError(f"Database already exists: {db_path}")
    db_path.parent.mkdir(parents=True, exist_ok=True)

    schema_path = pathlib.Path(__file__).parent / "schema.sql"
    connection = sqlite3.connect(str(db_path))
    try:
        # Fix: close the handle even if reading or executing the schema
        # raises, so a half-initialized file is not left with an open
        # connection behind it.
        connection.executescript(schema_path.read_text())
        connection.commit()
    finally:
        connection.close()
    return db_path
45
-
46
-
47
def ingest_symbology(json_path: pathlib.Path, db_path: pathlib.Path) -> int:
    """
    Load symbol-to-instrument-id mappings from a Databento symbology.json file.

    Opens the database, switches it into fast bulk-loading mode, delegates the
    parsing and batched inserts to ``_ingest_symbology_with_connection``, and
    restores safe SQLite settings before closing.

    Args:
        json_path: Path to the symbology.json file.
        db_path: Path to the secmaster SQLite database.

    Returns:
        The number of symbology records inserted.

    Raises:
        FileNotFoundError: If the JSON file does not exist.
        sqlite3.Error: If a database error occurs during ingestion.
    """
    db = sqlite3.connect(str(db_path))
    _enable_bulk_loading(db)
    try:
        inserted = _ingest_symbology_with_connection(json_path, db)
        db.commit()
    finally:
        _disable_bulk_loading(db)
        db.close()
    return inserted
74
-
75
-
76
def _ingest_symbology_with_connection(
    json_path: pathlib.Path, connection: sqlite3.Connection
) -> int:
    """
    Parse *json_path* and batch-insert its mappings via *connection*.

    Rows are buffered and written with executemany every BATCH_SIZE entries so
    large symbology files load quickly.  Returns the number of rows inserted.
    """
    sql = (
        "INSERT OR REPLACE INTO symbology "
        "(symbol, instrument_id, start_date, end_date) "
        "VALUES (?, ?, ?, ?)"
    )
    with open(json_path, "r") as fh:
        mappings_by_symbol = json.load(fh).get("result", {})

    cursor = connection.cursor()
    pending: list[tuple] = []
    total = 0
    for symbol, mappings in mappings_by_symbol.items():
        for m in mappings:
            pending.append((symbol, int(m["s"]), m["d0"], m["d1"]))
            total += 1
            if len(pending) >= BATCH_SIZE:
                cursor.executemany(sql, pending)
                pending.clear()

    # Flush whatever is left after the last full batch.
    if pending:
        cursor.executemany(sql, pending)

    return total
112
-
113
-
114
- def _enable_bulk_loading(connection: sqlite3.Connection) -> None:
115
- """
116
- Configure SQLite for fast bulk loading.
117
-
118
- Disables safety features that slow down bulk inserts. The tradeoff is that if the
119
- process crashes or power fails during import, the database may be corrupted and
120
- need to be recreated. This is acceptable for bulk imports where the source data
121
- is preserved and the import can be re-run.
122
-
123
- Settings:
124
- - synchronous=OFF: Don't wait for disk writes to complete
125
- - journal_mode=OFF: Disable rollback journal (no crash recovery)
126
- - cache_size=-64000: Use 64MB of memory for cache (negative = KB)
127
- """
128
- connection.execute("PRAGMA synchronous = OFF")
129
- connection.execute("PRAGMA journal_mode = OFF")
130
- connection.execute("PRAGMA cache_size = -64000")
131
-
132
-
133
- def _disable_bulk_loading(connection: sqlite3.Connection) -> None:
134
- """
135
- Restore SQLite to safe default settings after bulk loading.
136
- """
137
- connection.execute("PRAGMA synchronous = FULL")
138
- connection.execute("PRAGMA journal_mode = DELETE")
139
- connection.execute("PRAGMA cache_size = -2000")
140
-
141
-
142
def ingest_dbzip(zip_path: pathlib.Path, db_path: pathlib.Path) -> tuple[int, int]:
    """
    Ingest market data from a Databento zip archive into the secmaster database.

    Extracts every ``.dbn.zst`` / ``.dbn`` member and ingests it, then ingests
    symbology.json if the archive contains one.  Runs with the fast
    bulk-loading PRAGMAs enabled, so an interrupted import may leave the
    database corrupted; the source zip is never modified.  Finishes by
    refreshing the cached meta and per-symbol coverage tables.

    Args:
        zip_path: Path to the zip archive containing DBN files.
        db_path: Path to the secmaster SQLite database.

    Returns:
        A tuple of (dbn_record_count, symbology_record_count).
    """
    record_total = 0
    symbology_total = 0

    db = sqlite3.connect(str(db_path))
    _enable_bulk_loading(db)
    try:
        with zipfile.ZipFile(zip_path, "r") as archive:
            members = archive.namelist()
            dbn_members = [m for m in members if m.endswith((".dbn.zst", ".dbn"))]
            with tempfile.TemporaryDirectory() as workdir:
                root = pathlib.Path(workdir)
                for member in tqdm(dbn_members, desc="Ingesting DBN files", unit="file"):
                    archive.extract(member, workdir)
                    record_total += _ingest_dbn_with_connection(root / member, db)

                if "symbology.json" in members:
                    archive.extract("symbology.json", workdir)
                    symbology_total = _ingest_symbology_with_connection(
                        root / "symbology.json", db
                    )

        db.commit()
    finally:
        _disable_bulk_loading(db)
        db.close()

    update_meta(db_path)
    update_symbol_coverage(db_path)
    return record_total, symbology_total
193
-
194
-
195
def ingest_dbn(dbn_path: pathlib.Path, db_path: pathlib.Path) -> int:
    """
    Ingest a single Databento Binary Encoding (DBN) file into the database.

    Records are routed to the appropriate tables by record type (rtype); both
    plain ``.dbn`` and zstd-compressed ``.dbn.zst`` files are supported.  The
    import runs with fast bulk-loading PRAGMAs, and the cached meta /
    symbol-coverage tables are refreshed afterwards.

    Args:
        dbn_path: Path to the DBN file to ingest.
        db_path: Path to the secmaster SQLite database.

    Returns:
        The number of records successfully ingested.

    Raises:
        FileNotFoundError: If the DBN file does not exist.
        sqlite3.Error: If a database error occurs during ingestion.
    """
    db = sqlite3.connect(str(db_path))
    _enable_bulk_loading(db)
    try:
        ingested = _ingest_dbn_with_connection(dbn_path, db)
        db.commit()
    finally:
        _disable_bulk_loading(db)
        db.close()
    update_meta(db_path)
    update_symbol_coverage(db_path)
    return ingested
225
-
226
-
227
def _ingest_dbn_with_connection(
    dbn_path: pathlib.Path, connection: sqlite3.Connection
) -> int:
    """
    Ingest a DBN file using an existing connection, batching inserts.

    Each record is routed to the batch for its message type (first matching
    type wins, preserving the original match order); batches are flushed every
    BATCH_SIZE records and once more at the end.

    NOTE(review): the returned count includes every record read from the
    store, even record types with no matching table — confirm this matches
    callers' expectations of "records ingested".
    """
    # (message class, batch key, row converter) — order matters: checks run
    # first-match-wins, exactly like the original match statement.
    dispatch = (
        (databento.OHLCVMsg, "ohlcv", _ohlcv_to_tuple),
        (databento.TradeMsg, "trades", _trade_to_tuple),
        (databento.MBP1Msg, "quotes", _quote_to_tuple),
        (databento.BBOMsg, "bbo", _bbo_to_tuple),
        (databento.MBOMsg, "mbo", _mbo_to_tuple),
        (databento.MBP10Msg, "mbp10", _mbp10_to_tuple),
        (databento.ImbalanceMsg, "imbalance", _imbalance_to_tuple),
        (databento.StatMsg, "statistics", _statistics_to_tuple),
        (databento.StatusMsg, "status", _status_to_tuple),
        (databento.InstrumentDefMsg, "instruments", _instrument_to_tuple),
    )
    batches: dict[str, list[tuple]] = {key: [] for _, key, _ in dispatch}

    store = databento.DBNStore.from_file(dbn_path)
    cursor = connection.cursor()
    count = 0
    for record in store:
        for msg_type, key, convert in dispatch:
            if isinstance(record, msg_type):
                batches[key].append(convert(record))
                break
        count += 1

        if count % BATCH_SIZE == 0:
            _flush_batches(cursor, batches)

    _flush_batches(cursor, batches)
    return count
279
-
280
-
281
- def _flush_batches(cursor: sqlite3.Cursor, batches: dict) -> None:
282
- """Flush all non-empty batches to the database using executemany."""
283
- if batches["ohlcv"]:
284
- cursor.executemany(
285
- "INSERT OR REPLACE INTO ohlcv "
286
- "(instrument_id, rtype, ts_event, open, high, low, close, volume) "
287
- "VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
288
- batches["ohlcv"],
289
- )
290
- batches["ohlcv"].clear()
291
-
292
- if batches["trades"]:
293
- cursor.executemany(
294
- "INSERT OR REPLACE INTO trades "
295
- "(instrument_id, ts_event, ts_recv, price, size, action, side, flags, depth, ts_in_delta, sequence) "
296
- "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
297
- batches["trades"],
298
- )
299
- batches["trades"].clear()
300
-
301
- if batches["quotes"]:
302
- cursor.executemany(
303
- "INSERT OR REPLACE INTO quotes "
304
- "(instrument_id, ts_event, ts_recv, price, size, action, side, flags, depth, ts_in_delta, sequence, "
305
- "bid_px, ask_px, bid_sz, ask_sz, bid_ct, ask_ct) "
306
- "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
307
- batches["quotes"],
308
- )
309
- batches["quotes"].clear()
310
-
311
- if batches["bbo"]:
312
- cursor.executemany(
313
- "INSERT OR REPLACE INTO bbo "
314
- "(instrument_id, rtype, ts_event, ts_recv, price, size, side, flags, sequence, "
315
- "bid_px, ask_px, bid_sz, ask_sz, bid_ct, ask_ct) "
316
- "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
317
- batches["bbo"],
318
- )
319
- batches["bbo"].clear()
320
-
321
- if batches["mbo"]:
322
- cursor.executemany(
323
- "INSERT OR REPLACE INTO mbo "
324
- "(instrument_id, ts_event, ts_recv, order_id, price, size, flags, channel_id, action, side, ts_in_delta, sequence) "
325
- "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
326
- batches["mbo"],
327
- )
328
- batches["mbo"].clear()
329
-
330
- if batches["mbp10"]:
331
- cursor.executemany(
332
- "INSERT OR REPLACE INTO mbp10 "
333
- "(instrument_id, ts_event, ts_recv, price, size, action, side, flags, depth, ts_in_delta, sequence, "
334
- "bid_px_00, bid_px_01, bid_px_02, bid_px_03, bid_px_04, bid_px_05, bid_px_06, bid_px_07, bid_px_08, bid_px_09, "
335
- "ask_px_00, ask_px_01, ask_px_02, ask_px_03, ask_px_04, ask_px_05, ask_px_06, ask_px_07, ask_px_08, ask_px_09, "
336
- "bid_sz_00, bid_sz_01, bid_sz_02, bid_sz_03, bid_sz_04, bid_sz_05, bid_sz_06, bid_sz_07, bid_sz_08, bid_sz_09, "
337
- "ask_sz_00, ask_sz_01, ask_sz_02, ask_sz_03, ask_sz_04, ask_sz_05, ask_sz_06, ask_sz_07, ask_sz_08, ask_sz_09, "
338
- "bid_ct_00, bid_ct_01, bid_ct_02, bid_ct_03, bid_ct_04, bid_ct_05, bid_ct_06, bid_ct_07, bid_ct_08, bid_ct_09, "
339
- "ask_ct_00, ask_ct_01, ask_ct_02, ask_ct_03, ask_ct_04, ask_ct_05, ask_ct_06, ask_ct_07, ask_ct_08, ask_ct_09) "
340
- "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, "
341
- "?, ?, ?, ?, ?, ?, ?, ?, ?, ?, "
342
- "?, ?, ?, ?, ?, ?, ?, ?, ?, ?, "
343
- "?, ?, ?, ?, ?, ?, ?, ?, ?, ?, "
344
- "?, ?, ?, ?, ?, ?, ?, ?, ?, ?, "
345
- "?, ?, ?, ?, ?, ?, ?, ?, ?, ?, "
346
- "?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
347
- batches["mbp10"],
348
- )
349
- batches["mbp10"].clear()
350
-
351
- if batches["imbalance"]:
352
- cursor.executemany(
353
- "INSERT OR REPLACE INTO imbalance "
354
- "(instrument_id, ts_event, ts_recv, ref_price, auction_time, cont_book_clr_price, auct_interest_clr_price, "
355
- "ssr_filling_price, ind_match_price, upper_collar, lower_collar, paired_qty, total_imbalance_qty, "
356
- "market_imbalance_qty, unpaired_qty, auction_type, side, auction_status, freeze_status, num_extensions, "
357
- "unpaired_side, significant_imbalance) "
358
- "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
359
- batches["imbalance"],
360
- )
361
- batches["imbalance"].clear()
362
-
363
- if batches["statistics"]:
364
- cursor.executemany(
365
- "INSERT OR REPLACE INTO statistics "
366
- "(instrument_id, ts_event, ts_recv, ts_ref, price, quantity, sequence, ts_in_delta, stat_type, channel_id, "
367
- "update_action, stat_flags) "
368
- "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
369
- batches["statistics"],
370
- )
371
- batches["statistics"].clear()
372
-
373
- if batches["status"]:
374
- cursor.executemany(
375
- "INSERT OR REPLACE INTO status "
376
- "(instrument_id, ts_event, ts_recv, action, reason, trading_event, is_trading, is_quoting, is_short_sell_restricted) "
377
- "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)",
378
- batches["status"],
379
- )
380
- batches["status"].clear()
381
-
382
- if batches["instruments"]:
383
- cursor.executemany(
384
- "INSERT OR REPLACE INTO instruments "
385
- "(publisher_id, raw_instrument_id, raw_symbol, instrument_class, security_type, asset, cfi, exchange, currency, "
386
- "strike_price, strike_price_currency, expiration, activation, maturity_year, maturity_month, maturity_day, "
387
- "contract_multiplier, unit_of_measure, unit_of_measure_qty, underlying, display_factor, high_limit_price, "
388
- "low_limit_price, min_price_increment, security_group, ts_recv) "
389
- "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
390
- batches["instruments"],
391
- )
392
- batches["instruments"].clear()
393
-
394
-
395
- def _ohlcv_to_tuple(record: databento.OHLCVMsg) -> tuple:
396
- return (
397
- record.instrument_id,
398
- record.rtype.value,
399
- record.ts_event,
400
- record.open,
401
- record.high,
402
- record.low,
403
- record.close,
404
- record.volume,
405
- )
406
-
407
-
408
- def _trade_to_tuple(record: databento.TradeMsg) -> tuple:
409
- return (
410
- record.instrument_id,
411
- record.ts_event,
412
- record.ts_recv,
413
- record.price,
414
- record.size,
415
- str(record.action),
416
- str(record.side),
417
- record.flags,
418
- record.depth,
419
- record.ts_in_delta,
420
- record.sequence,
421
- )
422
-
423
-
424
- def _quote_to_tuple(record: databento.MBP1Msg) -> tuple:
425
- return (
426
- record.instrument_id,
427
- record.ts_event,
428
- record.ts_recv,
429
- record.price,
430
- record.size,
431
- str(record.action),
432
- str(record.side),
433
- record.flags,
434
- record.depth,
435
- record.ts_in_delta,
436
- record.sequence,
437
- record.levels[0].bid_px,
438
- record.levels[0].ask_px,
439
- record.levels[0].bid_sz,
440
- record.levels[0].ask_sz,
441
- record.levels[0].bid_ct,
442
- record.levels[0].ask_ct,
443
- )
444
-
445
-
446
- def _bbo_to_tuple(record: databento.BBOMsg) -> tuple:
447
- return (
448
- record.instrument_id,
449
- record.rtype.value,
450
- record.ts_event,
451
- record.ts_recv,
452
- record.price,
453
- record.size,
454
- str(record.side),
455
- record.flags,
456
- record.sequence,
457
- record.levels[0].bid_px,
458
- record.levels[0].ask_px,
459
- record.levels[0].bid_sz,
460
- record.levels[0].ask_sz,
461
- record.levels[0].bid_ct,
462
- record.levels[0].ask_ct,
463
- )
464
-
465
-
466
- def _mbo_to_tuple(record: databento.MBOMsg) -> tuple:
467
- return (
468
- record.instrument_id,
469
- record.ts_event,
470
- record.ts_recv,
471
- record.order_id,
472
- record.price,
473
- record.size,
474
- record.flags,
475
- record.channel_id,
476
- str(record.action),
477
- str(record.side),
478
- record.ts_in_delta,
479
- record.sequence,
480
- )
481
-
482
-
483
- def _mbp10_to_tuple(record: databento.MBP10Msg) -> tuple:
484
- levels = record.levels
485
- return (
486
- record.instrument_id,
487
- record.ts_event,
488
- record.ts_recv,
489
- record.price,
490
- record.size,
491
- str(record.action),
492
- str(record.side),
493
- record.flags,
494
- record.depth,
495
- record.ts_in_delta,
496
- record.sequence,
497
- levels[0].bid_px,
498
- levels[1].bid_px,
499
- levels[2].bid_px,
500
- levels[3].bid_px,
501
- levels[4].bid_px,
502
- levels[5].bid_px,
503
- levels[6].bid_px,
504
- levels[7].bid_px,
505
- levels[8].bid_px,
506
- levels[9].bid_px,
507
- levels[0].ask_px,
508
- levels[1].ask_px,
509
- levels[2].ask_px,
510
- levels[3].ask_px,
511
- levels[4].ask_px,
512
- levels[5].ask_px,
513
- levels[6].ask_px,
514
- levels[7].ask_px,
515
- levels[8].ask_px,
516
- levels[9].ask_px,
517
- levels[0].bid_sz,
518
- levels[1].bid_sz,
519
- levels[2].bid_sz,
520
- levels[3].bid_sz,
521
- levels[4].bid_sz,
522
- levels[5].bid_sz,
523
- levels[6].bid_sz,
524
- levels[7].bid_sz,
525
- levels[8].bid_sz,
526
- levels[9].bid_sz,
527
- levels[0].ask_sz,
528
- levels[1].ask_sz,
529
- levels[2].ask_sz,
530
- levels[3].ask_sz,
531
- levels[4].ask_sz,
532
- levels[5].ask_sz,
533
- levels[6].ask_sz,
534
- levels[7].ask_sz,
535
- levels[8].ask_sz,
536
- levels[9].ask_sz,
537
- levels[0].bid_ct,
538
- levels[1].bid_ct,
539
- levels[2].bid_ct,
540
- levels[3].bid_ct,
541
- levels[4].bid_ct,
542
- levels[5].bid_ct,
543
- levels[6].bid_ct,
544
- levels[7].bid_ct,
545
- levels[8].bid_ct,
546
- levels[9].bid_ct,
547
- levels[0].ask_ct,
548
- levels[1].ask_ct,
549
- levels[2].ask_ct,
550
- levels[3].ask_ct,
551
- levels[4].ask_ct,
552
- levels[5].ask_ct,
553
- levels[6].ask_ct,
554
- levels[7].ask_ct,
555
- levels[8].ask_ct,
556
- levels[9].ask_ct,
557
- )
558
-
559
-
560
- def _imbalance_to_tuple(record: databento.ImbalanceMsg) -> tuple:
561
- return (
562
- record.instrument_id,
563
- record.ts_event,
564
- record.ts_recv,
565
- record.ref_price,
566
- record.auction_time,
567
- record.cont_book_clr_price,
568
- record.auct_interest_clr_price,
569
- record.ssr_filling_price,
570
- record.ind_match_price,
571
- record.upper_collar,
572
- record.lower_collar,
573
- record.paired_qty,
574
- record.total_imbalance_qty,
575
- record.market_imbalance_qty,
576
- record.unpaired_qty,
577
- str(record.auction_type),
578
- str(record.side),
579
- record.auction_status,
580
- record.freeze_status,
581
- record.num_extensions,
582
- str(record.unpaired_side),
583
- str(record.significant_imbalance),
584
- )
585
-
586
-
587
- def _statistics_to_tuple(record: databento.StatMsg) -> tuple:
588
- stat_type = record.stat_type
589
- update_action = record.update_action
590
- return (
591
- record.instrument_id,
592
- record.ts_event,
593
- record.ts_recv,
594
- record.ts_ref,
595
- record.price,
596
- record.quantity,
597
- record.sequence,
598
- record.ts_in_delta,
599
- stat_type.value if hasattr(stat_type, "value") else stat_type,
600
- record.channel_id,
601
- update_action.value if hasattr(update_action, "value") else update_action,
602
- record.stat_flags,
603
- )
604
-
605
-
606
- def _status_to_tuple(record: databento.StatusMsg) -> tuple:
607
- action = record.action
608
- reason = record.reason
609
- trading_event = record.trading_event
610
- return (
611
- record.instrument_id,
612
- record.ts_event,
613
- record.ts_recv,
614
- action.value if hasattr(action, "value") else action,
615
- reason.value if hasattr(reason, "value") else reason,
616
- trading_event.value if hasattr(trading_event, "value") else trading_event,
617
- str(record.is_trading),
618
- str(record.is_quoting),
619
- str(record.is_short_sell_restricted),
620
- )
621
-
622
-
623
- def _instrument_to_tuple(record: databento.InstrumentDefMsg) -> tuple:
624
- return (
625
- record.publisher_id,
626
- record.instrument_id,
627
- record.raw_symbol,
628
- str(record.instrument_class),
629
- record.security_type,
630
- record.asset,
631
- record.cfi,
632
- record.exchange,
633
- record.currency,
634
- record.strike_price,
635
- record.strike_price_currency,
636
- record.expiration,
637
- record.activation,
638
- record.maturity_year,
639
- record.maturity_month,
640
- record.maturity_day,
641
- record.contract_multiplier,
642
- record.unit_of_measure,
643
- record.unit_of_measure_qty,
644
- record.underlying,
645
- record.display_factor,
646
- record.high_limit_price,
647
- record.low_limit_price,
648
- record.min_price_increment,
649
- record.group,
650
- record.ts_recv,
651
- )
652
-
653
-
654
def update_meta(db_path: pathlib.Path) -> None:
    """
    Compute and store aggregate statistics in the meta table.

    Runs the expensive COUNT/MIN/MAX aggregations once and caches the results
    as key/value rows in ``meta`` so the dashboard can read them cheaply.

    Args:
        db_path: Path to the secmaster SQLite database.
    """
    import time

    connection = sqlite3.connect(str(db_path))
    cursor = connection.cursor()

    (symbols,) = cursor.execute(
        "SELECT COUNT(DISTINCT instrument_id) FROM symbology"
    ).fetchone()
    (bars,) = cursor.execute("SELECT COUNT(*) FROM ohlcv").fetchone()
    # MIN/MAX come back as NULL on an empty table; store 0 instead.
    lo, hi = cursor.execute(
        "SELECT MIN(ts_event), MAX(ts_event) FROM ohlcv"
    ).fetchone()
    schemas = ",".join(
        str(row[0])
        for row in cursor.execute("SELECT DISTINCT rtype FROM ohlcv ORDER BY rtype")
    )

    cursor.executemany(
        "INSERT OR REPLACE INTO meta (key, value) VALUES (?, ?)",
        [
            ("symbol_count", str(symbols)),
            ("ohlcv_record_count", str(bars)),
            ("ohlcv_min_ts", str(lo or 0)),
            ("ohlcv_max_ts", str(hi or 0)),
            ("ohlcv_schemas", schemas),
            ("last_updated", str(int(time.time()))),
        ],
    )

    connection.commit()
    connection.close()
698
-
699
-
700
def update_symbol_coverage(db_path: pathlib.Path) -> int:
    """
    Rebuild the per-symbol coverage statistics in the symbol_coverage table.

    Aggregates OHLCV rows per instrument_id/rtype first (fast, uses the
    primary key), then joins against symbology to translate instrument ids
    into symbols.

    Args:
        db_path: Path to the secmaster SQLite database.

    Returns:
        The number of symbol/rtype combinations stored.
    """
    db = sqlite3.connect(str(db_path))
    cur = db.cursor()

    # Full rebuild: drop stale coverage rows before re-aggregating.
    cur.execute("DELETE FROM symbol_coverage")

    cur.execute(
        """
        INSERT INTO symbol_coverage (symbol, rtype, min_ts, max_ts, record_count)
        SELECT s.symbol, agg.rtype, MIN(agg.min_ts), MAX(agg.max_ts), SUM(agg.cnt)
        FROM (
            SELECT instrument_id, rtype, MIN(ts_event) as min_ts, MAX(ts_event) as max_ts, COUNT(*) as cnt
            FROM ohlcv
            GROUP BY instrument_id, rtype
        ) agg
        JOIN (
            SELECT DISTINCT instrument_id, symbol FROM symbology
        ) s ON agg.instrument_id = s.instrument_id
        GROUP BY s.symbol, agg.rtype
        """
    )

    inserted = cur.rowcount
    db.commit()
    db.close()
    return inserted