onesecondtrader 0.52.0__py3-none-any.whl → 0.54.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
onesecondtrader/datafeeds/__init__.py
@@ -0,0 +1,11 @@
+ """
+ Provides data feed components for ingesting market data into the system.
+ """
+
+ from .base import DatafeedBase
+ from .simulated import SimulatedDatafeed
+
+ __all__ = [
+     "DatafeedBase",
+     "SimulatedDatafeed",
+ ]
onesecondtrader/datafeeds/base.py
@@ -0,0 +1,90 @@
+ from __future__ import annotations
+
+ import abc
+
+ from onesecondtrader import events, messaging, models
+
+
+ class DatafeedBase(abc.ABC):
+     """
+     Abstract base class for market data feed implementations.
+
+     A data feed is responsible for connecting to an external data source, managing symbol and bar-period subscriptions, and publishing market data events onto the system event bus.
+
+     Concrete subclasses implement the mechanics of connectivity, subscription handling, and lifecycle management for a specific data source.
+     """
+
+     def __init__(self, event_bus: messaging.EventBus) -> None:
+         """
+         Initialize the data feed with an event bus.
+
+         Parameters:
+             event_bus:
+                 Event bus used to publish market data events produced by this data feed.
+         """
+         self._event_bus = event_bus
+
+     def _publish(self, event: events.EventBase) -> None:
+         """
+         Publish a market data event to the event bus.
+
+         This method is intended for use by subclasses to forward incoming data from the external source into the internal event-driven system.
+
+         Parameters:
+             event:
+                 Event instance to be published.
+         """
+         self._event_bus.publish(event)
+
+     @abc.abstractmethod
+     def connect(self) -> None:
+         """
+         Establish a connection to the underlying data source.
+
+         Implementations should perform any required setup, authentication, or resource allocation needed before subscriptions can be registered.
+         """
+         pass
+
+     @abc.abstractmethod
+     def disconnect(self) -> None:
+         """
+         Terminate the connection to the underlying data source.
+
+         Implementations should release resources and ensure that no further events are published after disconnection.
+         """
+         pass
+
+     @abc.abstractmethod
+     def subscribe(self, symbols: list[str], bar_period: models.BarPeriod) -> None:
+         """
+         Subscribe to market data for one or more symbols at a given bar period.
+
+         Parameters:
+             symbols:
+                 Instrument symbols to subscribe to, interpreted according to the conventions of the underlying data source.
+             bar_period:
+                 Bar aggregation period specifying the granularity of market data.
+         """
+         pass
+
+     @abc.abstractmethod
+     def unsubscribe(self, symbols: list[str], bar_period: models.BarPeriod) -> None:
+         """
+         Cancel existing subscriptions for one or more symbols at a given bar period.
+
+         Parameters:
+             symbols:
+                 Instrument symbols for which subscriptions should be removed.
+             bar_period:
+                 Bar aggregation period associated with the subscriptions.
+         """
+         pass
+
+     def wait_until_complete(self) -> None:
+         """
+         Block until the data feed has completed all pending work.
+
+         This method may be overridden by subclasses that perform asynchronous ingestion or background processing.
+         The default implementation does nothing.
+         """
+         pass
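For orientation, a minimal concrete subclass might look as follows. This is an illustrative sketch, not code from the package: `ListDatafeed` and its constructor are invented here, and only the `DatafeedBase` interface shown in the hunk above is taken from the diff.

```python
from onesecondtrader import events, messaging, models
from onesecondtrader.datafeeds import DatafeedBase


class ListDatafeed(DatafeedBase):
    """Hypothetical feed that replays a pre-built list of events synchronously."""

    def __init__(
        self, event_bus: messaging.EventBus, prepared: list[events.EventBase]
    ) -> None:
        super().__init__(event_bus)
        self._prepared = prepared
        self._active: set[tuple[str, models.BarPeriod]] = set()

    def connect(self) -> None:
        pass  # nothing to open for an in-memory source

    def disconnect(self) -> None:
        pass  # nothing to release

    def subscribe(self, symbols: list[str], bar_period: models.BarPeriod) -> None:
        self._active.update((s, bar_period) for s in symbols)

    def unsubscribe(self, symbols: list[str], bar_period: models.BarPeriod) -> None:
        self._active.difference_update((s, bar_period) for s in symbols)

    def wait_until_complete(self) -> None:
        # Forward every prepared event onto the bus via the protected helper.
        for event in self._prepared:
            self._publish(event)
```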
onesecondtrader/datafeeds/simulated.py
@@ -0,0 +1,209 @@
+ from __future__ import annotations
+
+ import itertools
+ import os
+ import sqlite3
+ import threading
+
+ from onesecondtrader import events, messaging, models
+ from onesecondtrader.datafeeds.base import DatafeedBase
+
+ _RTYPE_MAP = {
+     models.BarPeriod.SECOND: 32,
+     models.BarPeriod.MINUTE: 33,
+     models.BarPeriod.HOUR: 34,
+     models.BarPeriod.DAY: 35,
+ }
+
+ _RTYPE_TO_BAR_PERIOD = {v: k for k, v in _RTYPE_MAP.items()}
+
+
+ class SimulatedDatafeed(DatafeedBase):
+     """
+     Simulated market data feed backed by a secmaster SQLite database.
+
+     This datafeed replays historical OHLCV bars from a secmaster database, resolving symbols
+     via time-bounded symbology mappings. Bars are delivered in timestamp order; all bars that
+     share a timestamp are published before the feed waits on `wait_until_system_idle`.
+
+     Subclasses must set `publisher_name`, `dataset`, and `symbol_type` as class attributes to
+     scope the feed to a specific data source. Instruments in the database must carry numeric
+     `source_instrument_id` values; symbol-only publishers (e.g. yfinance) are not supported.
+     """
+
+     db_path: str = ""
+     publisher_name: str = ""
+     dataset: str = ""
+     symbol_type: str = ""
+     price_scale: float = 1e9
+     start_ts: int | None = None
+     end_ts: int | None = None
+
+     def __init__(self, event_bus: messaging.EventBus) -> None:
+         """
+         Parameters:
+             event_bus:
+                 Event bus used to publish bar events and synchronize with subscribers.
+         """
+         super().__init__(event_bus)
+         self._db_path = self.db_path or os.environ.get(
+             "SECMASTER_DB_PATH", "secmaster.db"
+         )
+         if not self.publisher_name:
+             raise ValueError("publisher_name is required")
+         if not self.dataset:
+             raise ValueError("dataset is required")
+         if not self.symbol_type:
+             raise ValueError("symbol_type is required")
+         self._subscriptions: set[tuple[str, models.BarPeriod]] = set()
+         self._subscriptions_lock = threading.Lock()
+         self._connection: sqlite3.Connection | None = None
+         self._thread: threading.Thread | None = None
+         self._stop_event = threading.Event()
+         self._publisher_id: int | None = None
+
+     def connect(self) -> None:
+         """
+         Open a connection to the secmaster database and resolve the publisher.
+
+         If already connected, this method returns immediately.
+         """
+         if self._connection:
+             return
+         self._connection = sqlite3.connect(self._db_path, check_same_thread=False)
+         self._connection.execute("PRAGMA foreign_keys = ON")
+         self._connection.execute("PRAGMA journal_mode = WAL")
+         row = self._connection.execute(
+             "SELECT publisher_id FROM publishers WHERE name = ? AND dataset = ?",
+             (self.publisher_name, self.dataset),
+         ).fetchone()
+         if row is None:
+             raise ValueError(
+                 f"Publisher not found: {self.publisher_name}/{self.dataset}"
+             )
+         self._publisher_id = row[0]
+
+     def disconnect(self) -> None:
+         """
+         Close the database connection and stop any active streaming.
+
+         If not connected, this method returns immediately.
+         """
+         if not self._connection:
+             return
+         self._stop_event.set()
+         if self._thread and self._thread.is_alive():
+             self._thread.join()
+         self._connection.close()
+         self._connection = None
+         self._publisher_id = None
+
+     def subscribe(self, symbols: list[str], bar_period: models.BarPeriod) -> None:
+         """
+         Register symbols for bar delivery at the specified period.
+
+         Parameters:
+             symbols:
+                 List of ticker symbols to subscribe.
+             bar_period:
+                 Bar aggregation period for the subscription.
+         """
+         with self._subscriptions_lock:
+             self._subscriptions.update((s, bar_period) for s in symbols)
+
+     def unsubscribe(self, symbols: list[str], bar_period: models.BarPeriod) -> None:
+         """
+         Remove symbols from bar delivery at the specified period.
+
+         Parameters:
+             symbols:
+                 List of ticker symbols to unsubscribe.
+             bar_period:
+                 Bar aggregation period for the subscription.
+         """
+         with self._subscriptions_lock:
+             self._subscriptions.difference_update((s, bar_period) for s in symbols)
+
+     def wait_until_complete(self) -> None:
+         """
+         Stream all subscribed bars and block until delivery is complete.
+
+         Bars are published in timestamp order. After each timestamp batch, the method
+         waits for all event bus subscribers to become idle before proceeding.
+         """
+         with self._subscriptions_lock:
+             has_subscriptions = bool(self._subscriptions)
+         if not has_subscriptions:
+             return
+         if self._thread is None or not self._thread.is_alive():
+             self._stop_event.clear()
+             self._thread = threading.Thread(
+                 target=self._stream,
+                 name=self.__class__.__name__,
+                 daemon=False,
+             )
+             self._thread.start()
+         self._thread.join()
+
+     def _stream(self) -> None:
+         if not self._connection or self._publisher_id is None:
+             return
+
+         with self._subscriptions_lock:
+             subscriptions = list(self._subscriptions)
+         if not subscriptions:
+             return
+
+         symbols = list({symbol for symbol, _ in subscriptions})
+         rtypes = list({_RTYPE_MAP[bp] for _, bp in subscriptions})
+         subscription_set = {(symbol, _RTYPE_MAP[bp]) for symbol, bp in subscriptions}
+
+         params: list = [self._publisher_id, self.symbol_type]
+         params.extend(symbols)
+         params.extend(rtypes)
+         if self.start_ts is not None:
+             params.append(self.start_ts)
+         if self.end_ts is not None:
+             params.append(self.end_ts)
+
+         query = f"""
+             SELECT s.symbol, o.rtype, o.ts_event, o.open, o.high, o.low, o.close, o.volume
+             FROM ohlcv o
+             JOIN instruments i ON i.instrument_id = o.instrument_id
+             JOIN symbology s
+                 ON s.publisher_ref = i.publisher_ref
+                 AND s.source_instrument_id = i.source_instrument_id
+                 AND date(o.ts_event / 1000000000, 'unixepoch') >= s.start_date
+                 AND date(o.ts_event / 1000000000, 'unixepoch') <= s.end_date
+             WHERE i.publisher_ref = ?
+             AND s.symbol_type = ?
+             AND s.symbol IN ({",".join("?" * len(symbols))})
+             AND o.rtype IN ({",".join("?" * len(rtypes))})
+             {"AND o.ts_event >= ?" if self.start_ts is not None else ""}
+             {"AND o.ts_event <= ?" if self.end_ts is not None else ""}
+             ORDER BY o.ts_event, s.symbol
+         """
+
+         rows = self._connection.execute(query, params)
+
+         def to_bar(row):
+             symbol, rtype, ts_event, open_, high, low, close, volume = row
+             if (symbol, rtype) not in subscription_set:
+                 return None
+             return events.market.BarReceived(
+                 ts_event_ns=ts_event,
+                 symbol=symbol,
+                 bar_period=_RTYPE_TO_BAR_PERIOD[rtype],
+                 open=open_ / self.price_scale,
+                 high=high / self.price_scale,
+                 low=low / self.price_scale,
+                 close=close / self.price_scale,
+                 volume=volume,
+             )
+
+         for _, group in itertools.groupby(rows, key=lambda r: r[2]):
+             if self._stop_event.is_set():
+                 return
+             for bar in filter(None, map(to_bar, group)):
+                 self._publish(bar)
+             self._event_bus.wait_until_system_idle()
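A usage sketch, under stated assumptions: the class attribute values below are examples (the feed requires non-empty `publisher_name`, `dataset`, and `symbol_type`), the symbol `ESM4` is illustrative, and the `messaging.EventBus()` constructor call is assumed rather than taken from the diff.

```python
from onesecondtrader import messaging, models
from onesecondtrader.datafeeds import SimulatedDatafeed


class GlbxMinuteFeed(SimulatedDatafeed):
    # Scope the feed to one publisher namespace in the secmaster database.
    db_path = "secmaster.db"  # an empty string falls back to $SECMASTER_DB_PATH
    publisher_name = "databento"
    dataset = "GLBX.MDP3"
    symbol_type = "raw_symbol"


event_bus = messaging.EventBus()  # assumed constructor
feed = GlbxMinuteFeed(event_bus)
feed.connect()  # opens SQLite and resolves (name, dataset) to a publisher_id
feed.subscribe(["ESM4"], models.BarPeriod.MINUTE)
feed.wait_until_complete()  # replays bars in timestamp order, then returns
feed.disconnect()
```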
onesecondtrader/secmaster/__init__.py
@@ -1,3 +1,15 @@
  """
  Provides a schema for creating and utilities to populate the security master database.
  """
+
+ from .utils import (
+     create_secmaster_db,
+     ingest_databento_zip,
+     ingest_databento_dbn,
+ )
+
+ __all__ = [
+     "create_secmaster_db",
+     "ingest_databento_zip",
+     "ingest_databento_dbn",
+ ]
onesecondtrader/secmaster/schema_versions/secmaster_schema_v1.sql
@@ -4,10 +4,16 @@
  -- Instrument identity is modeled per publisher namespace and supports either numeric upstream identifiers or symbols.
  -- Contract specifications and other static reference metadata are intentionally out of scope for this schema and should be stored separately if ingested.
  --
+ -- The schema is explicitly ingestion-safe in the sense that:
+ --
+ -- 1) publishers are keyed by (vendor, dataset) rather than vendor alone, allowing multiple feeds per vendor;
+ -- 2) symbology admits multiple mappings sharing the same start date by including the resolved instrument identifier
+ --    in the primary key, preventing accidental overwrites during bulk ingestion.
+ --
  -- | Table         | Description |
  -- |---------------|-------------|
- -- | `publishers`  | Registry of data sources and their identifier namespaces. |
- -- | `instruments` | Registry of instruments observed from market data ingestion within a publisher namespace. |
+ -- | `publishers`  | Registry of vendor+dataset namespaces used for market data and instrument ingestion. |
+ -- | `instruments` | Registry of instruments observed from ingestion within a publisher namespace. |
  -- | `ohlcv`       | Aggregated OHLCV bar data keyed by instrument, bar duration (`rtype`), and event timestamp (`ts_event`). |
  -- | `symbology`   | Time-bounded mappings from publisher-native symbols to publisher-native instrument identifiers. |

@@ -15,15 +21,24 @@

  -- Registry of all data sources used for market data and instrument ingestion.
  --
- -- Each row represents a distinct data source.
- -- A publisher establishes the provenance of instrument definitions and price data and provides the context in which raw symbols and native instrument identifiers are interpreted.
+ -- Each row represents a distinct data product (feed) within a vendor namespace.
+ -- A publisher record is uniquely identified by the pair (`name`, `dataset`), not by `name` alone.
+ -- This allows a single vendor (e.g. Databento) to appear multiple times, once per concrete dataset/feed
+ -- (e.g. `GLBX.MDP3`, `XNAS.ITCH`).
+ --
+ -- A publisher establishes the provenance of instrument definitions and price data and provides the context
+ -- in which raw symbols and native instrument identifiers are interpreted.
  --
- -- | Field           | Type      | Constraints          | Description |
- -- |-----------------|-----------|----------------------|-------------|
- -- | `publisher_id`  | `INTEGER` | `PRIMARY KEY`        | Internal surrogate key uniquely identifying a data source within the system. |
- -- | `name`          | `TEXT`    | `NOT NULL`, `UNIQUE` | Human-readable identifier for the data source or vendor (e.g. `databento`, `yfinance`). |
- -- | `dataset`       | `TEXT`    | `NOT NULL`           | Identifier of the concrete data product or feed through which data is sourced; uses Databento dataset names (e.g. `GLBX.MDP3`) for Databento ingestion and internal identifiers for other sources (e.g. `YFINANCE`). |
- -- | `venue`         | `TEXT`    |                      | Optional ISO 10383 Market Identifier Code (MIC) describing the primary trading venue; may be NULL for aggregated or multi-venue sources. |
+ -- | Field           | Type      | Constraints             | Description |
+ -- |-----------------|-----------|-------------------------|-------------|
+ -- | `publisher_id`  | `INTEGER` | `PRIMARY KEY`           | Internal surrogate key uniquely identifying a publisher record within the system. |
+ -- | `name`          | `TEXT`    | `NOT NULL`              | Human-readable vendor identifier for the data source (e.g. `databento`, `yfinance`). |
+ -- | `dataset`       | `TEXT`    | `NOT NULL`              | Identifier of the concrete data product or feed through which data is sourced; uses Databento dataset names (e.g. `GLBX.MDP3`) for Databento ingestion and internal identifiers for other sources (e.g. `YFINANCE`). |
+ -- | `venue`         | `TEXT`    |                         | Optional ISO 10383 Market Identifier Code (MIC) describing the primary trading venue; may be NULL for aggregated or multi-venue sources. |
+ --
+ -- **Table constraints**
+ --
+ -- * `UNIQUE(name, dataset)` ensures that each vendor+feed combination is represented at most once.
  --
  -- **Examples**
  --
@@ -33,6 +48,12 @@
  -- * `dataset` = `'GLBX.MDP3'`
  -- * `venue` = `XCME`
  --
+ -- Databento NASDAQ TotalView feed:
+ --
+ -- * `name` = `'databento'`
+ -- * `dataset` = `'XNAS.ITCH'`
+ -- * `venue` = `XNAS`
+ --
  -- Yahoo Finance equity data:
  --
  -- * `name` = `'yfinance'`
@@ -41,14 +62,16 @@
  --
  CREATE TABLE publishers (
      publisher_id INTEGER PRIMARY KEY,
-     name TEXT NOT NULL UNIQUE,
+     name TEXT NOT NULL,
      dataset TEXT NOT NULL,
-     venue TEXT
+     venue TEXT,
+     UNIQUE (name, dataset)
  );




+
  -- Registry of instruments observed through market data ingestion.
  --
  -- Each row represents an instrument identity within a publisher namespace.
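The effect of the new key can be checked directly against the DDL above; the following standalone sketch reuses the same `publishers` definition:

```python
import sqlite3

con = sqlite3.connect(":memory:")
con.execute(
    """
    CREATE TABLE publishers (
        publisher_id INTEGER PRIMARY KEY,
        name TEXT NOT NULL,
        dataset TEXT NOT NULL,
        venue TEXT,
        UNIQUE (name, dataset)
    )
    """
)
# One vendor, two feeds: both rows are accepted under UNIQUE(name, dataset).
con.execute(
    "INSERT INTO publishers (name, dataset, venue) VALUES ('databento', 'GLBX.MDP3', 'XCME')"
)
con.execute(
    "INSERT INTO publishers (name, dataset, venue) VALUES ('databento', 'XNAS.ITCH', 'XNAS')"
)
try:
    # Repeating an existing vendor+dataset pair is rejected.
    con.execute("INSERT INTO publishers (name, dataset) VALUES ('databento', 'GLBX.MDP3')")
except sqlite3.IntegrityError as exc:
    print(exc)  # UNIQUE constraint failed: publishers.name, publishers.dataset
```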
@@ -59,13 +82,13 @@ CREATE TABLE publishers (
  -- The table does not store contract specifications or other reference metadata.
  -- Such metadata must be stored separately when available.
  --
- -- | Field                  | Type      | Constraints      | Description |
- -- |------------------------|-----------|------------------|-------------|
- -- | `instrument_id`        | `INTEGER` | `PRIMARY KEY`    | Internal surrogate key identifying an instrument record within the system. |
- -- | `publisher_ref`        | `INTEGER` | `NOT NULL`, `FK` | Foreign key reference to `publishers.publisher_id`, defining the publisher namespace in which this instrument identity is valid. |
+ -- | Field                  | Type      | Constraints      | Description |
+ -- |------------------------|-----------|------------------|-------------|
+ -- | `instrument_id`        | `INTEGER` | `PRIMARY KEY`    | Internal surrogate key identifying an instrument record within the system. |
+ -- | `publisher_ref`        | `INTEGER` | `NOT NULL`, `FK` | Foreign key reference to `publishers.publisher_id`, defining the publisher namespace in which this instrument identity is valid. |
  -- | `source_instrument_id` | `INTEGER` |                  | Publisher-native numeric instrument identifier as provided by the upstream data source (e.g. Databento instrument_id); may be `NULL` for symbol-only sources. |
- -- | `symbol`               | `TEXT`    |                  | Publisher-native symbol string identifying the instrument (e.g. raw symbol, ticker); may be NULL when numeric identifiers are used. |
- -- | `symbol_type`          | `TEXT`    |                  | Identifier describing the symbol scheme or resolution type used by the publisher (e.g. `raw_symbol`, `continuous`, `ticker`). |
+ -- | `symbol`               | `TEXT`    |                  | Publisher-native symbol string identifying the instrument (e.g. raw symbol, ticker); may be NULL when numeric identifiers are used. |
+ -- | `symbol_type`          | `TEXT`    |                  | Identifier describing the symbol scheme or resolution type used by the publisher (e.g. `raw_symbol`, `continuous`, `ticker`). |
  --
  -- Each instrument must be identifiable by at least one of `source_instrument_id` or `symbol`.
  -- Uniqueness constraints ensure that instrument identities do not collide within a publisher namespace.
@@ -78,7 +101,7 @@ CREATE TABLE instruments (

      source_instrument_id INTEGER,
      symbol TEXT,
-     symbol_type TEXT,
+     symbol_type TEXT,

      FOREIGN KEY (publisher_ref) REFERENCES publishers(publisher_id),

@@ -87,6 +110,8 @@ CREATE TABLE instruments (
          OR symbol IS NOT NULL
      ),

+     CHECK (symbol IS NULL OR symbol_type IS NOT NULL),
+
      UNIQUE (publisher_ref, source_instrument_id),
      UNIQUE (publisher_ref, symbol, symbol_type)
  );
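The two `CHECK` constraints can be exercised in isolation. This sketch reuses the constraint clauses shown above, with the column list implied by the field table; the foreign key to `publishers` is omitted so the example runs standalone:

```python
import sqlite3

con = sqlite3.connect(":memory:")
con.execute(
    """
    CREATE TABLE instruments (
        instrument_id INTEGER PRIMARY KEY,
        publisher_ref INTEGER NOT NULL,
        source_instrument_id INTEGER,
        symbol TEXT,
        symbol_type TEXT,
        CHECK (source_instrument_id IS NOT NULL OR symbol IS NOT NULL),
        CHECK (symbol IS NULL OR symbol_type IS NOT NULL),
        UNIQUE (publisher_ref, source_instrument_id),
        UNIQUE (publisher_ref, symbol, symbol_type)
    )
    """
)
# A numeric-only identity (e.g. a Databento instrument_id) is accepted.
con.execute("INSERT INTO instruments (publisher_ref, source_instrument_id) VALUES (1, 42)")
try:
    # A symbol without a symbol_type violates the new CHECK.
    con.execute("INSERT INTO instruments (publisher_ref, symbol) VALUES (1, 'AAPL')")
except sqlite3.IntegrityError as exc:
    print(exc)  # CHECK constraint failed
```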
@@ -98,16 +123,16 @@ CREATE TABLE instruments (

  -- Stores aggregated OHLCV bars for instruments at multiple time resolutions.
  --
- -- | Field           | Type      | Constraints                                 | Description |
- -- |-----------------|-----------|---------------------------------------------|-------------|
- -- | `instrument_id` | `INTEGER` | `NOT NULL`, `FK`                            | Foreign key reference to `instruments.instrument_id`, identifying the instrument to which this bar belongs. |
+ -- | Field           | Type      | Constraints                                 | Description |
+ -- |-----------------|-----------|---------------------------------------------|-------------|
+ -- | `instrument_id` | `INTEGER` | `NOT NULL`, `FK`                            | Foreign key reference to `instruments.instrument_id`, identifying the instrument to which this bar belongs. |
  -- | `rtype`         | `INTEGER` | `NOT NULL`, `CHECK IN (32, 33, 34, 35, 36)` | Record type code encoding the bar duration using Databento OHLCV conventions (e.g. `32`=1s, `33`=1m, `34`=1h, `35`=1d). |
- -- | `ts_event`      | `INTEGER` | `NOT NULL`                                  | Event timestamp of the bar as provided by the upstream source, stored as nanoseconds since the UTC Unix epoch. |
- -- | `open`          | `INTEGER` | `NOT NULL`                                  | Opening price of the bar interval, stored as a fixed-point integer using the upstream price scaling convention. |
- -- | `high`          | `INTEGER` | `NOT NULL`                                  | Highest traded price during the bar interval, stored as a fixed-point integer. |
- -- | `low`           | `INTEGER` | `NOT NULL`, `CHECK(low <= high)`            | Lowest traded price during the bar interval, stored as a fixed-point integer. |
- -- | `close`         | `INTEGER` | `NOT NULL`                                  | Closing price of the bar interval, stored as a fixed-point integer. |
- -- | `volume`        | `INTEGER` | `NOT NULL`, `CHECK(volume >= 0)`            | Total traded volume during the bar interval. |
+ -- | `ts_event`      | `INTEGER` | `NOT NULL`                                  | Event timestamp of the bar as provided by the upstream source, stored as nanoseconds since the UTC Unix epoch. |
+ -- | `open`          | `INTEGER` | `NOT NULL`                                  | Opening price of the bar interval, stored as a fixed-point integer using the upstream price scaling convention. |
+ -- | `high`          | `INTEGER` | `NOT NULL`                                  | Highest traded price during the bar interval, stored as a fixed-point integer. |
+ -- | `low`           | `INTEGER` | `NOT NULL`, `CHECK(low <= high)`            | Lowest traded price during the bar interval, stored as a fixed-point integer. |
+ -- | `close`         | `INTEGER` | `NOT NULL`                                  | Closing price of the bar interval, stored as a fixed-point integer. |
+ -- | `volume`        | `INTEGER` | `NOT NULL`, `CHECK(volume >= 0)`            | Total traded volume during the bar interval. |
  --
  -- The composite primary key enforces uniqueness per instrument, bar duration, and event timestamp.
  -- Integrity constraints ensure basic OHLC consistency and prevent invalid price relationships from being stored.
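The fixed-point storage convention matters downstream: `SimulatedDatafeed.price_scale` in the diff above defaults to `1e9`, implying prices stored in units of 1e-9. A one-line sanity check, with an illustrative value:

```python
PRICE_SCALE = 1e9  # same factor as SimulatedDatafeed.price_scale

raw_close = 4_512_250_000_000  # fixed-point integer as stored in ohlcv.close
print(raw_close / PRICE_SCALE)  # 4512.25, the decimal price published on the bus
```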
@@ -135,7 +160,12 @@ CREATE TABLE ohlcv (

  -- Stores time-bounded mappings from publisher-native symbols to publisher-native instrument identifiers.
  --
- -- The table captures symbol resolution rules as provided by upstream data sources and must be interpreted within the namespace of a specific publisher.
+ -- The table captures symbol resolution rules as provided by upstream data sources and must be interpreted within the
+ -- namespace of a specific publisher.
+ --
+ -- The schema permits multiple mappings to share the same `start_date` for a given (`publisher_ref`, `symbol`, `symbol_type`)
+ -- by including `source_instrument_id` in the primary key. This prevents accidental overwrite when upstream symbology exports
+ -- contain same-day corrections, backfills, or parallel resolution segments.
  --
  -- | Field                  | Type      | Constraints      | Description |
  -- |------------------------|-----------|------------------|-------------|
@@ -146,7 +176,7 @@ CREATE TABLE ohlcv (
  -- | `start_date`           | `TEXT`    | `NOT NULL`       | First calendar date (inclusive) on which this symbol-to-instrument mapping is valid, stored in YYYY-MM-DD format. |
  -- | `end_date`             | `TEXT`    | `NOT NULL`       | Last calendar date (inclusive) on which this symbol-to-instrument mapping is valid, stored in YYYY-MM-DD format. |
  --
- -- The primary key enforces uniqueness of symbol mappings per publisher, symbol type, and start date.
+ -- The primary key enforces uniqueness of mappings at the granularity of a resolved instrument.
  -- Date bounds are interpreted as closed intervals.
  --
  CREATE TABLE symbology (
@@ -157,8 +187,9 @@ CREATE TABLE symbology (
      start_date TEXT NOT NULL,
      end_date TEXT NOT NULL,
      FOREIGN KEY (publisher_ref) REFERENCES publishers(publisher_id),
-     PRIMARY KEY (publisher_ref, symbol, symbol_type, start_date),
-     UNIQUE (publisher_ref, symbol_type, source_instrument_id, start_date),
+     FOREIGN KEY (publisher_ref, source_instrument_id)
+         REFERENCES instruments(publisher_ref, source_instrument_id),
+     PRIMARY KEY (publisher_ref, symbol, symbol_type, start_date, source_instrument_id),
      CHECK (start_date <= end_date)
  );

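A reduced standalone sketch of the new key: only the primary-key columns and the date check are kept (both foreign keys are dropped), which is enough to show that two same-day mappings resolved to different instruments now coexist. The symbol and instrument identifiers are illustrative:

```python
import sqlite3

con = sqlite3.connect(":memory:")
con.execute(
    """
    CREATE TABLE symbology (
        publisher_ref INTEGER NOT NULL,
        symbol TEXT NOT NULL,
        symbol_type TEXT NOT NULL,
        source_instrument_id INTEGER NOT NULL,
        start_date TEXT NOT NULL,
        end_date TEXT NOT NULL,
        PRIMARY KEY (publisher_ref, symbol, symbol_type, start_date, source_instrument_id),
        CHECK (start_date <= end_date)
    )
    """
)
# Same publisher, symbol, symbol_type, and start_date, but two resolved
# instruments: under the old four-column key the second row would have collided.
rows = [
    (1, "ES.c.0", "continuous", 100001, "2024-01-02", "2024-01-02"),
    (1, "ES.c.0", "continuous", 100002, "2024-01-02", "2024-01-02"),
]
con.executemany("INSERT INTO symbology VALUES (?, ?, ?, ?, ?, ?)", rows)
print(con.execute("SELECT COUNT(*) FROM symbology").fetchone()[0])  # 2
```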
onesecondtrader/secmaster/utils.py
@@ -1,16 +1,32 @@
  from __future__ import annotations

+ import json
+ import logging
  import pathlib
+ import shutil
  import sqlite3
+ import tempfile
+ import zipfile
+
+ import databento
+
+
+ BATCH_SIZE = 10000
+ LOG_EVERY_OHLCV = 1_000_000
+ LOG_EVERY_SYMBOLOGY = 50_000
+
+ logger = logging.getLogger(__name__)


  def create_secmaster_db(db_path: pathlib.Path, schema_version: int = 1) -> pathlib.Path:
      """
      Create a new security master SQLite database using a selected schema version.

-     The database file is created at the given path and initialized by executing the SQL script located in the `schema_versions` directory adjacent to this module.
+     The database file is created at the given path and initialized by executing the SQL script
+     located in the `schema_versions` directory adjacent to this module.

-     The function expects the schema script to set `PRAGMA user_version` to the corresponding schema version and verifies this after execution.
+     The function expects the schema script to set `PRAGMA user_version` to the corresponding
+     schema version and verifies this after execution.

      Parameters:
          db_path:
@@ -27,8 +43,8 @@ def create_secmaster_db(db_path: pathlib.Path, schema_version: int = 1) -> pathl
          FileNotFoundError:
              If the schema script for `schema_version` does not exist.
          sqlite3.DatabaseError:
-             If the applied schema does not set the expected `user_version`
-             or if SQLite fails while executing the schema.
+             If the applied schema does not set the expected `user_version` or if SQLite fails
+             while executing the schema.
      """
      if db_path.exists():
          raise FileExistsError(f"Database already exists: {db_path}")
@@ -57,8 +73,539 @@ def create_secmaster_db(db_path: pathlib.Path, schema_version: int = 1) -> pathl

      if actual_version != schema_version:
          raise sqlite3.DatabaseError(
-             f"Schema script set user_version={actual_version}, "
-             f"expected {schema_version}"
+             f"Schema script set user_version={actual_version}, expected {schema_version}"
          )

      return db_path
+
+
+ def ingest_databento_zip(
+     zip_path: pathlib.Path,
+     db_path: pathlib.Path,
+     publisher_name: str = "databento",
+     symbol_type: str = "raw_symbol",
+     dataset: str | None = None,
+ ) -> tuple[int, int]:
+     """
+     Ingest market data from a Databento zip archive into the security master database.
+
+     The archive may contain one or more DBN files and an optional `symbology.json`. The function
+     ingests OHLCV records from DBN files into `ohlcv` and ingests symbol-to-instrument mappings
+     into `symbology`.
+
+     The publisher namespace is created if absent. Publisher identity is determined by the pair
+     `(publisher_name, dataset)`, where `dataset` is extracted from `metadata.json` in the archive.
+
+     Ingestion is idempotent with respect to primary keys: existing `ohlcv` and `symbology` rows are
+     left unchanged.
+
+     Parameters:
+         zip_path:
+             Path to the Databento zip archive.
+         db_path:
+             Path to the security master SQLite database.
+         publisher_name:
+             Vendor name stored in `publishers.name`. The dataset is derived from archive metadata.
+         symbol_type:
+             Symbol scheme stored in `symbology.symbol_type` for symbols found in `symbology.json`.
+         dataset:
+             Optional dataset override. If provided, it is used when `metadata.json` is missing or
+             does not specify a dataset.
+
+     Returns:
+         A tuple of (ohlcv_record_count_seen, symbology_record_count_seen).
+     """
+     ohlcv_count = 0
+     symbology_count = 0
+
+     logger.info("Opening Databento archive: %s", zip_path)
+
+     if not db_path.is_file():
+         raise FileNotFoundError(f"Security master DB not found: {db_path}")
+
+     con = sqlite3.connect(str(db_path))
+
+     try:
+         con.execute("PRAGMA foreign_keys = ON;")
+         _assert_secmaster_db(con)
+         _enable_bulk_loading(con)
+
+         with con:
+             with zipfile.ZipFile(zip_path, "r") as zf:
+                 dataset, venue = _extract_dataset_info(zf, dataset_override=dataset)
+                 logger.info(
+                     "Publisher resolved: name=%s dataset=%s venue=%s",
+                     publisher_name,
+                     dataset,
+                     venue,
+                 )
+                 publisher_id = _get_or_create_publisher(
+                     con, publisher_name, dataset, venue
+                 )
+
+                 with tempfile.TemporaryDirectory() as tmpdir:
+                     dbn_files = [
+                         n
+                         for n in zf.namelist()
+                         if n.endswith(".dbn.zst") or n.endswith(".dbn")
+                     ]
+                     symbology_member = _zip_find_member(zf, "symbology.json")
+
+                     if not dbn_files and symbology_member is None:
+                         raise ValueError(
+                             "Archive contains no DBN files and no symbology.json"
+                         )
+
+                     logger.info("Found %d DBN file(s) in archive", len(dbn_files))
+
+                     for name in dbn_files:
+                         extracted_path = _zip_member_to_tempfile(zf, name, tmpdir)
+                         try:
+                             logger.info("Ingesting DBN file: %s", extracted_path.name)
+                             ohlcv_count += _ingest_dbn(
+                                 extracted_path, con, publisher_id
+                             )
+                         finally:
+                             try:
+                                 extracted_path.unlink()
+                             except FileNotFoundError:
+                                 pass
+
+                     if symbology_member is not None:
+                         symbology_path = _zip_member_to_tempfile(
+                             zf, symbology_member, tmpdir
+                         )
+                         try:
+                             logger.info("Ingesting symbology.json")
+                             symbology_count += _ingest_symbology(
+                                 symbology_path,
+                                 con,
+                                 publisher_id,
+                                 symbol_type=symbol_type,
+                             )
+                         finally:
+                             try:
+                                 symbology_path.unlink()
+                             except FileNotFoundError:
+                                 pass
+                     else:
+                         logger.info("No symbology.json present in archive")
+     finally:
+         try:
+             _disable_bulk_loading(con)
+         finally:
+             con.close()
+
+     logger.info(
+         "Finished zip ingestion: %s (%d OHLCV records, %d symbology records)",
+         zip_path.name,
+         ohlcv_count,
+         symbology_count,
+     )
+
+     return ohlcv_count, symbology_count
+
+
+ def ingest_databento_dbn(
+     dbn_path: pathlib.Path,
+     db_path: pathlib.Path,
+     publisher_name: str = "databento",
+ ) -> int:
+     """
+     Ingest market data from a Databento DBN file into the security master database.
+
+     Reads OHLCV records from the DBN file and inserts them into `ohlcv`. The publisher namespace
+     is created if absent. Publisher identity is determined by the pair `(publisher_name, dataset)`,
+     where `dataset` is read from DBN metadata.
+
+     Ingestion is idempotent with respect to primary keys: existing bars are left unchanged.
+
+     Parameters:
+         dbn_path:
+             Path to the DBN file (.dbn or .dbn.zst).
+         db_path:
+             Path to the security master SQLite database.
+         publisher_name:
+             Vendor name stored in `publishers.name`. The dataset is derived from DBN metadata.
+
+     Returns:
+         The number of OHLCV records seen in the DBN stream.
+     """
+     logger.info("Starting DBN ingestion: %s", dbn_path)
+
+     if not db_path.is_file():
+         raise FileNotFoundError(f"Security master DB not found: {db_path}")
+
+     con = sqlite3.connect(str(db_path))
+
+     try:
+         con.execute("PRAGMA foreign_keys = ON;")
+         _assert_secmaster_db(con)
+         _enable_bulk_loading(con)
+
+         with con:
+             store = databento.DBNStore.from_file(dbn_path)
+             dataset = store.metadata.dataset
+             if not dataset:
+                 raise ValueError(f"DBN metadata missing dataset: {dbn_path}")
+             venue = dataset.split(".")[0] if "." in dataset else None
+
+             logger.info(
+                 "Publisher resolved: name=%s dataset=%s venue=%s",
+                 publisher_name,
+                 dataset,
+                 venue,
+             )
+
+             publisher_id = _get_or_create_publisher(con, publisher_name, dataset, venue)
+             count = _ingest_dbn(dbn_path, con, publisher_id)
+     finally:
+         try:
+             _disable_bulk_loading(con)
+         finally:
+             con.close()
+
+     logger.info("Finished DBN ingestion: %s (%d OHLCV records)", dbn_path.name, count)
+
+     return count
+
+
+ def _extract_dataset_info(
+     zf: zipfile.ZipFile,
+     dataset_override: str | None = None,
+ ) -> tuple[str, str | None]:
+     metadata_member = _zip_find_member(zf, "metadata.json")
+     if metadata_member is None:
+         if dataset_override is None:
+             raise ValueError(
+                 "Archive is missing metadata.json and no dataset override was provided"
+             )
+         dataset = dataset_override
+     else:
+         with zf.open(metadata_member) as f:
+             metadata = json.load(f)
+         dataset = metadata.get("query", {}).get("dataset")
+         if not dataset:
+             if dataset_override is None:
+                 raise ValueError(
+                     f"metadata.json is missing query.dataset (member={metadata_member!r})"
+                 )
+             dataset = dataset_override
+
+     venue = dataset.split(".")[0] if "." in dataset else None
+     return dataset, venue
+
+
+ def _zip_find_member(
+     zf: zipfile.ZipFile,
+     basename: str,
+     allow_multiple: bool = False,
+ ) -> str | None:
+     candidates = [
+         name
+         for name in zf.namelist()
+         if name == basename or name.endswith("/" + basename)
+     ]
+     if not candidates:
+         return None
+     if len(candidates) == 1:
+         return candidates[0]
+
+     candidates = sorted(candidates)
+     if not allow_multiple:
+         raise ValueError(f"Multiple {basename} members found in archive: {candidates}")
+
+     selected = candidates[0]
+     logger.warning("Multiple %s found in archive; using %s", basename, selected)
+     return selected
+
+
+ def _zip_member_to_tempfile(
+     zf: zipfile.ZipFile,
+     member_name: str,
+     tmpdir: str,
+ ) -> pathlib.Path:
+     suffix = "".join(pathlib.PurePosixPath(member_name).suffixes)
+     with tempfile.NamedTemporaryFile(
+         mode="wb",
+         suffix=suffix,
+         delete=False,
+         dir=tmpdir,
+     ) as tmp:
+         with zf.open(member_name) as src:
+             shutil.copyfileobj(src, tmp)
+     return pathlib.Path(tmp.name)
+
+
+ def _get_or_create_publisher(
+     con: sqlite3.Connection,
+     name: str,
+     dataset: str,
+     venue: str | None,
+ ) -> int:
+     cursor = con.cursor()
+     cursor.execute(
+         "SELECT publisher_id FROM publishers WHERE name = ? AND dataset = ?",
+         (name, dataset),
+     )
+     row = cursor.fetchone()
+     if row:
+         return row[0]
+
+     cursor.execute(
+         "INSERT INTO publishers (name, dataset, venue) VALUES (?, ?, ?)",
+         (name, dataset, venue),
+     )
+     return cursor.lastrowid  # type: ignore[return-value]
+
+
+ def _get_or_create_instrument(
+     con: sqlite3.Connection,
+     publisher_id: int,
+     source_instrument_id: int,
+ ) -> int:
+     cursor = con.cursor()
+     cursor.execute(
+         "SELECT instrument_id FROM instruments WHERE publisher_ref = ? AND source_instrument_id = ?",
+         (publisher_id, source_instrument_id),
+     )
+     row = cursor.fetchone()
+     if row:
+         return row[0]
+
+     cursor.execute(
+         "INSERT INTO instruments (publisher_ref, source_instrument_id) VALUES (?, ?)",
+         (publisher_id, source_instrument_id),
+     )
+     return cursor.lastrowid  # type: ignore[return-value]
+
+
+ def _assert_secmaster_db(
+     con: sqlite3.Connection, expected_user_version: int = 1
+ ) -> None:
+     row = con.execute("PRAGMA user_version;").fetchone()
+     user_version = int(row[0]) if row else 0
+     if user_version != expected_user_version:
+         raise sqlite3.DatabaseError(
+             "Security master schema user_version="
+             f"{user_version} does not match expected {expected_user_version}"
+         )
+
+     required = {"publishers", "instruments", "ohlcv", "symbology"}
+     present = {
+         r[0]
+         for r in con.execute(
+             "SELECT name FROM sqlite_master WHERE type = 'table'"
+         ).fetchall()
+     }
+     missing = sorted(required - present)
+     if missing:
+         raise sqlite3.DatabaseError(
+             f"Security master schema missing required tables: {', '.join(missing)}"
+         )
+
+
+ def _ingest_dbn(
+     dbn_path: pathlib.Path,
+     con: sqlite3.Connection,
+     publisher_id: int,
+ ) -> int:
+     store = databento.DBNStore.from_file(dbn_path)
+     cursor = con.cursor()
+
+     instrument_cache: dict[int, int] = {}
+     batch: list[tuple] = []
+     count = 0
+
+     logger.info("Streaming OHLCV records from: %s", dbn_path.name)
+
+     for record in store:
+         if not isinstance(record, databento.OHLCVMsg):
+             continue
+
+         source_id = record.instrument_id
+         if source_id not in instrument_cache:
+             instrument_cache[source_id] = _get_or_create_instrument(
+                 con, publisher_id, source_id
+             )
+         internal_id = instrument_cache[source_id]
+
+         rtype_val = (
+             record.rtype.value if hasattr(record.rtype, "value") else record.rtype
+         )
+
+         batch.append(
+             (
+                 internal_id,
+                 rtype_val,
+                 record.ts_event,
+                 record.open,
+                 record.high,
+                 record.low,
+                 record.close,
+                 record.volume,
+             )
+         )
+         count += 1
+
+         if count % LOG_EVERY_OHLCV == 0:
+             logger.info("Ingested %d OHLCV records from %s", count, dbn_path.name)
+
+         if len(batch) >= BATCH_SIZE:
+             cursor.executemany(
+                 "INSERT OR IGNORE INTO ohlcv "
+                 "(instrument_id, rtype, ts_event, open, high, low, close, volume) "
+                 "VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
+                 batch,
+             )
+             batch.clear()
+
+     if batch:
+         cursor.executemany(
+             "INSERT OR IGNORE INTO ohlcv "
+             "(instrument_id, rtype, ts_event, open, high, low, close, volume) "
+             "VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
+             batch,
+         )
+
+     logger.info("Completed OHLCV ingest from %s (%d records)", dbn_path.name, count)
+
+     return count
+
+
+ def _ingest_symbology(
+     json_path: pathlib.Path,
+     con: sqlite3.Connection,
+     publisher_id: int,
+     symbol_type: str = "raw_symbol",
+ ) -> int:
+     if not isinstance(symbol_type, str) or not symbol_type:
+         raise ValueError("symbol_type must be a non-empty string")
+
+     with open(json_path, "r") as f:
+         data = json.load(f)
+
+     if not isinstance(data, dict):
+         raise ValueError("symbology.json root must be a JSON object")
+
+     result = data.get("result", {})
+     if not isinstance(result, dict):
+         raise ValueError("symbology.json['result'] must be an object")
+     cursor = con.cursor()
+
+     batch: list[tuple] = []
+     count = 0
+
+     logger.info("Streaming symbology mappings from: %s", json_path.name)
+
+     instrument_cache: set[int] = set()
+
+     for symbol, mappings in result.items():
+         if not isinstance(mappings, list):
+             raise ValueError(
+                 f"symbology.json mappings must be a list for symbol={symbol!r}"
+             )
+
+         for i, mapping in enumerate(mappings):
+             if not isinstance(mapping, dict):
+                 raise ValueError(
+                     f"symbology.json mapping must be an object at symbol={symbol!r} index={i}"
+                 )
+
+             missing_keys = [k for k in ("s", "d0", "d1") if k not in mapping]
+             if missing_keys:
+                 raise ValueError(
+                     "symbology.json mapping missing key(s) "
+                     f"{missing_keys} at symbol={symbol!r} index={i}"
+                 )
+
+             source_id = int(mapping["s"])
+
+             if source_id not in instrument_cache:
+                 _get_or_create_instrument(con, publisher_id, source_id)
+                 instrument_cache.add(source_id)
+
+             batch.append(
+                 (
+                     publisher_id,
+                     symbol,
+                     symbol_type,
+                     source_id,
+                     mapping["d0"],
+                     mapping["d1"],
+                 )
+             )
+             count += 1
+
+             if count % LOG_EVERY_SYMBOLOGY == 0:
+                 logger.info(
+                     "Ingested %d symbology mappings from %s", count, json_path.name
+                 )
+
+             if len(batch) >= BATCH_SIZE:
+                 cursor.executemany(
+                     "INSERT OR IGNORE INTO symbology "
+                     "(publisher_ref, symbol, symbol_type, source_instrument_id, start_date, end_date) "
+                     "VALUES (?, ?, ?, ?, ?, ?)",
+                     batch,
+                 )
+                 batch.clear()
+
+     if batch:
+         cursor.executemany(
+             "INSERT OR IGNORE INTO symbology "
+             "(publisher_ref, symbol, symbol_type, source_instrument_id, start_date, end_date) "
+             "VALUES (?, ?, ?, ?, ?, ?)",
+             batch,
+         )
+
+     _validate_no_overlapping_symbology(con, publisher_id, symbol_type)
+
+     logger.info(
+         "Completed symbology ingest from %s (%d mappings)", json_path.name, count
+     )
+
+     return count
+
+
+ def _validate_no_overlapping_symbology(
+     con: sqlite3.Connection,
+     publisher_id: int,
+     symbol_type: str,
+ ) -> None:
+     query = """
+         WITH ordered AS (
+             SELECT
+                 symbol,
+                 start_date,
+                 end_date,
+                 LEAD(start_date) OVER (
+                     PARTITION BY symbol ORDER BY start_date
+                 ) AS next_start
+             FROM symbology
+             WHERE publisher_ref = ? AND symbol_type = ?
+         )
+         SELECT symbol, start_date, end_date, next_start
+         FROM ordered
+         WHERE next_start IS NOT NULL AND end_date > next_start
+         LIMIT 1
+     """
+     row = con.execute(query, (publisher_id, symbol_type)).fetchone()
+     if row:
+         symbol, start, end, next_start = row
+         raise ValueError(
+             f"Overlapping symbology detected for symbol={symbol!r}: "
+             f"segment [{start}, {end}] overlaps with next segment starting {next_start}"
+         )
+
+
+ def _enable_bulk_loading(con: sqlite3.Connection) -> None:
+     con.execute("PRAGMA journal_mode = WAL")
+     con.execute("PRAGMA synchronous = NORMAL")
+     con.execute("PRAGMA cache_size = -64000")
+
+
+ def _disable_bulk_loading(con: sqlite3.Connection) -> None:
+     con.execute("PRAGMA synchronous = FULL")
+     con.execute("PRAGMA journal_mode = DELETE")
+     con.execute("PRAGMA cache_size = -2000")
onesecondtrader-0.54.0.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: onesecondtrader
- Version: 0.52.0
+ Version: 0.54.0
  Summary: The Trading Infrastructure Toolkit for Python. Research, simulate, and deploy algorithmic trading strategies — all in one place.
  License-File: LICENSE
  Author: Nils P. Kujath
onesecondtrader-0.54.0.dist-info/RECORD
@@ -2,6 +2,9 @@ onesecondtrader/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  onesecondtrader/brokers/__init__.py,sha256=CmOhwKOayuYCeg5KRiTp4fc8nSDnsLzIBkUNWhUevlo,271
  onesecondtrader/brokers/base.py,sha256=I4tQFr7P1DF5QAWb3I9tHz5D_zTleH8vEXS2WsH55DE,3531
  onesecondtrader/brokers/simulated.py,sha256=ZY39a84J2BmC2ADMkrSRzNBumPXudVBz2eUSnnHb0LM,17930
+ onesecondtrader/datafeeds/__init__.py,sha256=uu67phyj4ruIlMqrsbFQ_mP5u2we881WgQoOhuN_KvU,214
+ onesecondtrader/datafeeds/base.py,sha256=MOSUCuVfzPpqGf_T7O2aW64m9H2J4oa5u3VClQwZASE,3089
+ onesecondtrader/datafeeds/simulated.py,sha256=LjPJU6RTt7i9e0pE0YSrtjmGLAQOGmmIpWSQMS9Dh6g,7840
  onesecondtrader/events/__init__.py,sha256=1T7hJA6afxClEXvvnbXtHu9iMyhduRdJZWlg4ObWaKE,222
  onesecondtrader/events/base.py,sha256=WpLo1bSKJe7Poh2IuDDCiBYZo9vE8mkq3cQlUpyTXsY,850
  onesecondtrader/events/market/__init__.py,sha256=49z6maexBIDkAjIfkLbYzSZWEbyTpQ_HEEgT0eacrDo,132
@@ -34,11 +37,11 @@ onesecondtrader/models/bar_period.py,sha256=J8ncVtcAxR52uD0nbC8Knds_GUP5wiuNj5rA
  onesecondtrader/models/order_types.py,sha256=SiJamarLQ7zkHzHLLbd86I_TeZrQJ4QEIMqNHj4dxXU,737
  onesecondtrader/models/rejection_reasons.py,sha256=Avp1JYf413_aUQQkEeswI-9EJBmQdd7B6bnQ1MslDNE,2132
  onesecondtrader/models/trade_sides.py,sha256=Pf9BpxoUxqgKC_EKAExfSqgfIIK9NW-RpJES0XHRF-8,583
- onesecondtrader/secmaster/__init__.py,sha256=miFKJ50zz3STh2HktAjYX8fPD8mzgWipr2EeiZfKdoM,95
+ onesecondtrader/secmaster/__init__.py,sha256=XAouFrbRTpWWp8U43LQUkj8EZvJR9ydlI9fVdJjH1BY,294
  onesecondtrader/secmaster/schema_versions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- onesecondtrader/secmaster/schema_versions/secmaster_schema_v1.sql,sha256=-IeHE-nO0j8hJfgvaN07_0o4zoZ2oCsvSTncCUbrfvk,11540
- onesecondtrader/secmaster/utils.py,sha256=kh7djz1o8Yxl2lSMqu1oqWYs7l8_FNMaDb_8yvItIbQ,2172
- onesecondtrader-0.52.0.dist-info/METADATA,sha256=gKzl22R5cIZ8laToY-fDdURwaMDr4udUpNc3aBsVb3o,9951
- onesecondtrader-0.52.0.dist-info/WHEEL,sha256=3ny-bZhpXrU6vSQ1UPG34FoxZBp3lVcvK0LkgUz6VLk,88
- onesecondtrader-0.52.0.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
- onesecondtrader-0.52.0.dist-info/RECORD,,
+ onesecondtrader/secmaster/schema_versions/secmaster_schema_v1.sql,sha256=E41rVhpYlXiC_GR4cw1bNQW_8Fdy8d-s1RJASIUCijM,12974
+ onesecondtrader/secmaster/utils.py,sha256=d8PMSNLWVr10G0CSdL9vF-j_9jTfvOLxC-6k42x6LRU,19587
+ onesecondtrader-0.54.0.dist-info/METADATA,sha256=iVpo6U1CIhN_NOYh1yzyqq25GVcadtP26VL93M6NehY,9951
+ onesecondtrader-0.54.0.dist-info/WHEEL,sha256=kJCRJT_g0adfAJzTx2GUMmS80rTJIVHRCfG0DQgLq3o,88
+ onesecondtrader-0.54.0.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+ onesecondtrader-0.54.0.dist-info/RECORD,,
onesecondtrader-0.54.0.dist-info/WHEEL
@@ -1,4 +1,4 @@
  Wheel-Version: 1.0
- Generator: poetry-core 2.3.0
+ Generator: poetry-core 2.3.1
  Root-Is-Purelib: true
  Tag: py3-none-any