onetick-py 1.176.0__py3-none-any.whl → 1.179.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- onetick/py/__init__.py +1 -1
- onetick/py/_version.py +1 -1
- onetick/py/aggregations/_docs.py +10 -0
- onetick/py/aggregations/order_book.py +18 -6
- onetick/py/compatibility.py +88 -18
- onetick/py/core/_source/source_methods/misc.py +73 -13
- onetick/py/core/_source/tmp_otq.py +8 -0
- onetick/py/core/source.py +25 -3
- onetick/py/db/_inspection.py +113 -50
- onetick/py/oqd/sources.py +22 -8
- onetick/py/run.py +9 -5
- onetick/py/sources/__init__.py +1 -0
- onetick/py/sources/dataframe.py +370 -0
- onetick/py/sources/symbols.py +4 -6
- onetick/py/sources/ticks.py +10 -2
- onetick/py/types.py +36 -0
- onetick/py/utils/__init__.py +1 -0
- onetick/py/utils/debug.py +17 -0
- onetick/py/utils/render_cli.py +88 -0
- {onetick_py-1.176.0.dist-info → onetick_py-1.179.0.dist-info}/METADATA +2 -1
- {onetick_py-1.176.0.dist-info → onetick_py-1.179.0.dist-info}/RECORD +25 -22
- {onetick_py-1.176.0.dist-info → onetick_py-1.179.0.dist-info}/entry_points.txt +1 -0
- {onetick_py-1.176.0.dist-info → onetick_py-1.179.0.dist-info}/WHEEL +0 -0
- {onetick_py-1.176.0.dist-info → onetick_py-1.179.0.dist-info}/licenses/LICENSE +0 -0
- {onetick_py-1.176.0.dist-info → onetick_py-1.179.0.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,370 @@
|
|
|
1
|
+
from typing import Optional, Tuple
|
|
2
|
+
|
|
3
|
+
import onetick.py as otp
|
|
4
|
+
from onetick.py.otq import otq
|
|
5
|
+
import pandas as pd
|
|
6
|
+
|
|
7
|
+
from onetick.py.core.source import Source
|
|
8
|
+
|
|
9
|
+
from .. import types as ott
|
|
10
|
+
from .. import utils
|
|
11
|
+
|
|
12
|
+
from .common import update_node_tick_type
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class _ReadFromDataFrameSource(Source):
    """Source wrapping the native ``otq.ReadFromDataFrame`` event processor.

    Used by :func:`ReadFromDataFrame` when the installed ``onetick.query``
    build provides the ``ReadFromDataFrame`` EP (see the ``hasattr`` check in
    :func:`ReadFromDataFrame`); otherwise the ``otp.Ticks``-based fallback is used.
    """

    def __init__(
        self,
        dataframe=None,
        timestamp_column=None,
        symbol_name_field=None,
        symbol_value=None,
        symbol=utils.adaptive,
        db=utils.adaptive_to_default,
        tick_type=utils.adaptive,
        start=utils.adaptive,
        end=utils.adaptive,
        schema=None,
        **kwargs,
    ):
        # Support copy/deserialization constructor path of the base Source.
        if self._try_default_constructor(**kwargs):
            return

        if schema is None:
            schema = {}

        super().__init__(
            _symbols=symbol,
            _start=start,
            _end=end,
            # base EP graph is built lazily, when the query is actually constructed
            _base_ep_func=lambda: self.base_ep(
                dataframe=dataframe,
                timestamp_column=timestamp_column,
                symbol_name_field=symbol_name_field,
                symbol_value=symbol_value,
                db=db,
                tick_type=tick_type,
                columns=schema,
            ),
            schema=schema,
        )

    def base_ep(
        self,
        dataframe,
        timestamp_column=None,
        symbol_name_field=None,
        symbol_value=None,
        db=utils.adaptive_to_default,
        tick_type=utils.adaptive,
        columns=None,
    ):
        """Build the EP graph reading ``dataframe`` and fixing up timestamps.

        Returns a :class:`Source` whose TIMESTAMP field is parsed from the
        (renamed) timestamp column in the query timezone, when one is given.
        """
        if columns is None:
            columns = {}

        # otq.ReadFromDataFrame expects plain strings, not None, for these params
        if symbol_value is None:
            symbol_value = ''

        if symbol_name_field is None:
            symbol_name_field = ''

        if timestamp_column:
            # rename to a temporary name so the original column name can't
            # collide with the TIMESTAMP meta field inside the EP
            temp_column_name = f'__TMP_TS_COLUMN__{timestamp_column}__'
            dataframe = dataframe.rename(columns={timestamp_column: temp_column_name})
            timestamp_column = temp_column_name

        src = Source(
            otq.ReadFromDataFrame(
                dataframe=dataframe,
                symbol_name_field=symbol_name_field,
                symbol_value=symbol_value,
            ).get_data_file_ep(),
            schema=columns,
        )

        update_node_tick_type(src, tick_type, db)

        if timestamp_column:
            # DATA_FILE_QUERY process timestamps as GMT timezone
            # In order to process timestamps with query TZ we store timestamps as strings
            # and process them after this EP
            src.sink(
                otq.UpdateField(
                    field="TIMESTAMP",
                    value=f'parse_nsectime("%Y-%m-%d %H:%M:%S.%J", {timestamp_column}, _TIMEZONE)',
                    allow_unordered_output_times=True,
                )
            )
            # drop the temporary string column once TIMESTAMP is set
            src.sink(otq.Passthrough(fields=timestamp_column, drop_fields=True))

        return src
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
def _get_offsets(dataframe, timestamp_column) -> Tuple[otp.datetime, list]:
    """Convert a dataframe timestamp column into a base time plus per-row offsets.

    Parameters
    ----------
    dataframe: :pandas:`pandas.DataFrame`
        Source dataframe; expected to have at least one row.
    timestamp_column: str
        Name of the column holding the timestamps.

    Returns
    -------
    tuple
        ``(base_ts, offsets)`` where ``base_ts`` is the first row's timestamp
        as ``ott.datetime`` and ``offsets`` is a list of ``ott.timedelta``
        objects, one per row, relative to ``base_ts``.
    """
    column = dataframe[timestamp_column]
    # positional access via .iloc: the caller's dataframe may have a
    # non-default index, where label-based column[0]/column[i] would raise
    base_ts = pd.Timestamp(column.iloc[0])

    offsets = [ott.timedelta(0)]
    for i in range(1, len(dataframe)):
        diff: pd.Timedelta = pd.Timestamp(column.iloc[i]) - base_ts
        # Timedelta.components maps 1:1 onto ott.timedelta keyword arguments
        offsets.append(ott.timedelta(**diff.components._asdict()))

    return ott.datetime(base_ts), offsets
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
def _fallback_source(
    dataframe,
    timestamp_column=None,
    symbol_value=None,
    symbol_name_field=None,
    db=utils.adaptive_to_default,
    tick_type=utils.adaptive,
):
    """Load a dataframe through ``otp.Ticks`` when ``otq.ReadFromDataFrame`` is unavailable.

    Emulates the native EP: sets SYMBOL_NAME from ``symbol_value`` or
    ``symbol_name_field``, and reproduces row timestamps via a base start time
    plus per-row offsets computed by ``_get_offsets``.
    """
    rows_num = len(dataframe)
    if symbol_value:
        dataframe['SYMBOL_NAME'] = symbol_value

    if symbol_name_field:
        # symbol comes from a column: mirror it into SYMBOL_NAME for otp.Ticks
        dataframe['SYMBOL_NAME'] = dataframe[symbol_name_field]

    data = dataframe.to_dict(orient='list')

    ticks_kwargs = {}
    if timestamp_column:
        if rows_num > 0:
            # encode row times as start + offsets, since otp.Ticks has no
            # direct per-tick absolute-timestamp input
            base_ts, offsets = _get_offsets(dataframe, timestamp_column)
            ticks_kwargs['offset'] = offsets
            ticks_kwargs['start'] = base_ts

        # remove original timestamp
        del data[timestamp_column]

    # NOTE(review): this section must live at function level (not inside the
    # `if timestamp_column:` branch above), because `ts_column_mapping` is read
    # unconditionally below — confirm against the released file.
    ts_column_mapping = []
    save_ts_column_list = [
        col for col in data.keys()
        if col and col.lower() == 'timestamp'
    ]
    if save_ts_column_list:
        # For some reason OneTick CSV_FILE_LISTING doesn't like timestamp columns with any case

        for idx, col in enumerate(save_ts_column_list):
            # stash under a temporary name now, restore after the Ticks source is built
            _mapping = (f'__TMP_TIMESTAMP_COLUMN_{idx}__', col)
            ts_column_mapping.append(_mapping)
            data[_mapping[0]] = data[_mapping[1]]
            del data[_mapping[1]]

    src = otp.Ticks(data=data, db=db, tick_type=tick_type, **ticks_kwargs)

    if symbol_name_field:
        # SYMBOL_NAME was only needed to feed otp.Ticks; the original
        # symbol_name_field column itself stays in the output
        src.drop(['SYMBOL_NAME'], inplace=True)

    if not timestamp_column:
        # no time info in the dataframe: place all ticks at query end time
        src['Time'] = otp.meta_fields.end_time

    if ts_column_mapping:
        for _mapping in ts_column_mapping:
            src.rename({_mapping[0]: _mapping[1]}, inplace=True)

    return src
|
|
168
|
+
|
|
169
|
+
|
|
170
|
+
def _autodetect_timestamp_column(dataframe: pd.DataFrame) -> Optional[str]:
|
|
171
|
+
timestamp_columns = [col for col in dataframe.columns if col.lower() in ['time', 'timestamp']]
|
|
172
|
+
if len(timestamp_columns) > 1:
|
|
173
|
+
raise ValueError(
|
|
174
|
+
'Could not determine timestamp column from multiple available choices: ' + ', '.join(timestamp_columns)
|
|
175
|
+
)
|
|
176
|
+
elif len(timestamp_columns) == 1:
|
|
177
|
+
return timestamp_columns[0]
|
|
178
|
+
|
|
179
|
+
return None
|
|
180
|
+
|
|
181
|
+
|
|
182
|
+
def ReadFromDataFrame(
    dataframe=None,
    timestamp_column=utils.adaptive,
    symbol_name_field=None,
    symbol=utils.adaptive,
    db=utils.adaptive_to_default,
    tick_type=utils.adaptive,
    start=utils.adaptive,
    end=utils.adaptive,
    force_compatibility_mode=False,
    **kwargs,
):
    """
    Load :pandas:`pandas.DataFrame` as data source

    Parameters
    ----------
    dataframe: :pandas:`pandas.DataFrame`
        Pandas DataFrame to load.
    timestamp_column: str, optional
        Column containing time info.

        If parameter not set and DataFrame has one of columns ``TIME`` or ``Timestamp`` (case-insensitive),
        it will be automatically used as ``timestamp_column``. To disable this, set ``timestamp_column=None``.

        Timestamp column dtype should be either datetime related or string.
    symbol_name_field: str, optional
        Column containing symbol name.
    symbol: str
        Symbol(s) from which data should be taken.

        If both `symbol_name_field` and `symbol` are omitted
        :py:attr:`otp.config.default_symbol<onetick.py.configuration.Config.default_symbol>` value will be used.
    db: str
        Custom database name for the node of the graph.
    tick_type: str
        Tick type.
        Default: ANY.
    start: :py:class:`otp.datetime <onetick.py.datetime>`
        Custom start time of the query.
    end: :py:class:`otp.datetime <onetick.py.datetime>`
        Custom end time of the query.
    force_compatibility_mode: bool
        Force use of old dataframe load method

    Examples
    --------

    Let's assume that we have the following pandas dataframe:

    >>> print(dataframe) # doctest: +SKIP
                    Timestamp  SIDE  PRICE  SIZE
    0 2024-01-01 12:00:00.001   BUY  50.05   100
    1 2024-01-01 12:00:02.000  SELL  50.05   150
    2 2024-01-01 12:00:02.500   BUY  49.95   200
    3 2024-01-01 12:00:03.100  SELL  49.98    80
    4 2024-01-01 12:00:03.250   BUY  50.02   250

    Simple dataframe loading, timestamp column will be automatically detected and converted to datetime:

    >>> src = otp.ReadFromDataFrame(dataframe, symbol='AAPL') # doctest: +SKIP
    >>> otp.run(src, date=otp.date(2024, 1, 1)) # doctest: +SKIP
                         Time  SIDE  PRICE  SIZE SYMBOL_NAME
    0 2024-01-01 12:00:00.001   BUY  50.05   100        AAPL
    1 2024-01-01 12:00:02.000  SELL  50.05   150        AAPL
    2 2024-01-01 12:00:02.500   BUY  49.95   200        AAPL
    3 2024-01-01 12:00:03.100  SELL  49.98    80        AAPL
    4 2024-01-01 12:00:03.250   BUY  50.02   250        AAPL

    Setting custom `timestamp_column`. For example, if we have ``DATA_TIMES`` column, instead of ``Timestamp``

    >>> src = otp.ReadFromDataFrame(dataframe, symbol='AAPL', timestamp_column='DATA_TIMES') # doctest: +SKIP
    >>> otp.run(src, date=otp.date(2024, 1, 1)) # doctest: +SKIP
                         Time  SIDE  PRICE  SIZE SYMBOL_NAME
    0 2024-01-01 12:00:00.001   BUY  50.05   100        AAPL
    1 2024-01-01 12:00:02.000  SELL  50.05   150        AAPL
    2 2024-01-01 12:00:02.500   BUY  49.95   200        AAPL
    3 2024-01-01 12:00:03.100  SELL  49.98    80        AAPL
    4 2024-01-01 12:00:03.250   BUY  50.02   250        AAPL

    You can load data even without time data. ``Time`` column will be set as query end time.

    >>> src = otp.ReadFromDataFrame(dataframe, symbol='AAPL') # doctest: +SKIP
    >>> otp.run(src, date=otp.date(2024, 1, 1)) # doctest: +SKIP
            Time  SIDE  PRICE  SIZE SYMBOL_NAME
    0 2024-01-02   BUY  50.05   100        AAPL
    1 2024-01-02  SELL  50.05   150        AAPL
    2 2024-01-02   BUY  49.95   200        AAPL
    3 2024-01-02  SELL  49.98    80        AAPL
    4 2024-01-02   BUY  50.02   250        AAPL

    Same effect will be if you don't set ``timestamp_column`` and disable automatic timestamp detection:

    >>> src = otp.ReadFromDataFrame(dataframe, symbol='AAPL', timestamp_column=None) # doctest: +SKIP
    >>> otp.run(src, date=otp.date(2024, 1, 1)) # doctest: +SKIP
            Time               Timestamp  SIDE  PRICE  SIZE SYMBOL_NAME
    0 2024-01-02 2024-01-01 12:00:00.001   BUY  50.05   100        AAPL
    1 2024-01-02 2024-01-01 12:00:02.000  SELL  50.05   150        AAPL
    2 2024-01-02 2024-01-01 12:00:02.500   BUY  49.95   200        AAPL
    3 2024-01-02 2024-01-01 12:00:03.100  SELL  49.98    80        AAPL
    4 2024-01-02 2024-01-01 12:00:03.250   BUY  50.02   250        AAPL

    Setting ``symbol_name_field`` for setting symbol name from dataframe.
    In this example, let's say, that we have column ``SYMBOL`` with symbol names.

    >>> src = otp.ReadFromDataFrame(dataframe, symbol_name_field='SYMBOL') # doctest: +SKIP
    >>> otp.run(src, date=otp.date(2024, 1, 1)) # doctest: +SKIP
                         Time  SIDE  PRICE  SIZE SYMBOL
    0 2024-01-01 12:00:00.001   BUY  50.05   100   AAPL
    1 2024-01-01 12:00:02.000  SELL  50.05   150   AAPL
    2 2024-01-01 12:00:02.500   BUY  49.95   200   AAPL
    3 2024-01-01 12:00:03.100  SELL  49.98    80   AAPL
    4 2024-01-01 12:00:03.250   BUY  50.02   250   AAPL
    """
    if dataframe is None:
        raise ValueError('DataFrame should be passed to `ReadFromDataFrame` constructor')

    if not isinstance(dataframe, pd.DataFrame):
        raise ValueError(f'`dataframe` parameter expected to be pandas DataFrame, got `{type(dataframe)}`')

    # work on a private copy: columns may be renamed/converted below
    dataframe = dataframe.copy(deep=True)

    if ('TIMESTAMP' in dataframe.columns or 'Time' in dataframe.columns) and not (
        timestamp_column in ['TIMESTAMP', 'Time'] or timestamp_column is utils.adaptive
    ):
        # Can't set meta fields
        raise ValueError(
            'It\'s not allowed to both have `TIMESTAMP` or `Time` column in DataFrame '
            'and pass `timestamp_column` parameter with different column '
            'or disable timestamp column autodetection'
        )

    if timestamp_column is utils.adaptive:
        timestamp_column = _autodetect_timestamp_column(dataframe)

    if timestamp_column:
        if timestamp_column not in dataframe.columns:
            raise ValueError(f'Column `{timestamp_column}` passed as `timestamp_column` parameter not in dataframe')

        if ott.np2type(dataframe[timestamp_column].dtype) in [ott.nsectime, ott.msectime]:
            # convert back to string
            dataframe[timestamp_column] = (dataframe[timestamp_column].dt.strftime('%Y-%m-%d %H:%M:%S.%f') +
                                           dataframe[timestamp_column].dt.nanosecond.astype(str).str.zfill(3))

    # build the otp schema from the dataframe dtypes (timestamp column excluded)
    columns = {}
    for column, dtype in dataframe.dtypes.to_dict().items():
        if timestamp_column == column:
            continue
        else:
            dtype = ott.np2type(dtype)

        columns[column] = dtype

    if not symbol_name_field and symbol is utils.adaptive:
        symbol = otp.config.get('default_symbol')

    symbol_value = None
    if symbol is not utils.adaptive:
        if symbol_name_field:
            raise ValueError('`symbol_name_field` parameter is passed while `symbol` parameter is defined')

        symbol_value = symbol

        # otq.ReadFromDataFrame adds this column in this case
        columns['SYMBOL_NAME'] = str
    elif symbol_name_field and symbol_name_field not in dataframe.columns:
        raise ValueError(f'Column `{symbol_name_field}` passed as `symbol_name_field` parameter not in dataframe')

    # prefer the native EP when the installed onetick.query provides it
    if hasattr(otq, 'ReadFromDataFrame') and not force_compatibility_mode:
        return _ReadFromDataFrameSource(
            dataframe=dataframe,
            timestamp_column=timestamp_column,
            symbol_name_field=symbol_name_field,
            symbol_value=symbol_value,
            symbol=symbol,
            db=db,
            tick_type=tick_type,
            start=start,
            end=end,
            schema=columns,
            **kwargs,
        )
    else:
        return _fallback_source(
            dataframe=dataframe,
            timestamp_column=timestamp_column,
            symbol_name_field=symbol_name_field,
            symbol_value=symbol_value,
        )
|
onetick/py/sources/symbols.py
CHANGED
|
@@ -27,7 +27,9 @@ class Symbols(Source):
|
|
|
27
27
|
Name of the database where to search symbols.
|
|
28
28
|
By default the database used by :py:func:`otp.run <onetick.py.run>` will be inherited.
|
|
29
29
|
keep_db: bool
|
|
30
|
-
Flag that indicates whether symbols should have a
|
|
30
|
+
Flag that indicates whether symbols should have a database name prefix in the output.
|
|
31
|
+
If True, symbols are returned in *DB_NAME::SYMBOL_NAME* format.
|
|
32
|
+
Otherwise just symbol names are returned.
|
|
31
33
|
pattern: str
|
|
32
34
|
Usual and special characters can be used to search for symbols.
|
|
33
35
|
Special characters are:
|
|
@@ -350,16 +352,12 @@ class Symbols(Source):
|
|
|
350
352
|
self._tmp_otq.merge(_tmp_otq)
|
|
351
353
|
|
|
352
354
|
def base_ep(self, ep_tick_type, keep_db, **params):
|
|
353
|
-
use_prepend_db_name = is_symbols_prepend_db_name_supported()
|
|
354
|
-
if use_prepend_db_name and not keep_db:
|
|
355
|
-
params['prepend_db_name'] = False
|
|
356
|
-
|
|
357
355
|
src = Source(otq.FindDbSymbols(**params))
|
|
358
356
|
|
|
359
357
|
update_node_tick_type(src, ep_tick_type)
|
|
360
358
|
src.schema['SYMBOL_NAME'] = str
|
|
361
359
|
|
|
362
|
-
if not keep_db
|
|
360
|
+
if not keep_db:
|
|
363
361
|
src["SYMBOL_NAME"] = src["SYMBOL_NAME"].str.regex_replace('.*::', '')
|
|
364
362
|
|
|
365
363
|
return src
|
onetick/py/sources/ticks.py
CHANGED
|
@@ -378,6 +378,8 @@ def Ticks(data=None, # NOSONAR
|
|
|
378
378
|
|
|
379
379
|
* :pandas:`DataFrame <pandas.DataFrame>`
|
|
380
380
|
|
|
381
|
+
.. deprecated:: 1.178.0
|
|
382
|
+
|
|
381
383
|
* ``None`` -- ``inplace_data`` will be used
|
|
382
384
|
|
|
383
385
|
symbol: str, list of str, :class:`Source`, :class:`query`, :py:func:`eval query <onetick.py.eval>`
|
|
@@ -448,8 +450,8 @@ def Ticks(data=None, # NOSONAR
|
|
|
448
450
|
>>> time_array = [start_datetime + otp.Hour(1) + otp.Nano(1)]
|
|
449
451
|
>>> a_array = [start_datetime - otp.Day(15) - otp.Nano(7)]
|
|
450
452
|
>>> df = pd.DataFrame({'Time': time_array,'A': a_array})
|
|
451
|
-
>>> data = otp.Ticks(df)
|
|
452
|
-
>>> otp.run(data, start=start_datetime, end=start_datetime + otp.Day(1))
|
|
453
|
+
>>> data = otp.Ticks(df) # doctest: +SKIP
|
|
454
|
+
>>> otp.run(data, start=start_datetime, end=start_datetime + otp.Day(1)) # doctest: +SKIP
|
|
453
455
|
Time A
|
|
454
456
|
0 2023-01-01 13:00:00.000000001 2022-12-17 11:59:59.999999993
|
|
455
457
|
|
|
@@ -485,6 +487,12 @@ def Ticks(data=None, # NOSONAR
|
|
|
485
487
|
db = configuration.config.get('default_db')
|
|
486
488
|
|
|
487
489
|
if isinstance(data, pd.DataFrame):
|
|
490
|
+
warnings.warn(
|
|
491
|
+
"Using pandas DataFrame as `data` parameter is deprecated, "
|
|
492
|
+
"use `otp.ReadFromDataFrame` source instead.",
|
|
493
|
+
FutureWarning,
|
|
494
|
+
)
|
|
495
|
+
|
|
488
496
|
if offset is not utils.adaptive:
|
|
489
497
|
raise ValueError("Parameter 'offset' can't be set when passing pandas.DataFrame.")
|
|
490
498
|
if data.empty:
|
onetick/py/types.py
CHANGED
|
@@ -1854,6 +1854,42 @@ def type2np(t):
|
|
|
1854
1854
|
return np.dtype(t)
|
|
1855
1855
|
|
|
1856
1856
|
|
|
1857
|
+
def np2type(t):
    """Convert a numpy/pandas dtype into the corresponding onetick.py type.

    Parameters
    ----------
    t: numpy.dtype or pandas ExtensionDtype
        Dtype object to convert.

    Returns
    -------
    type
        Matching onetick.py or builtin type.

    Raises
    ------
    ValueError
        If ``t`` is not a dtype object or the dtype has no known mapping.
    """
    if not isinstance(t, (np.dtype, pd.api.extensions.ExtensionDtype)):
        raise ValueError(f'Unsupported value passed to `np2type`: `{t}` not numpy dtype')

    name = t.name
    if name == 'int64':
        return int
    elif name == 'int8':
        return byte
    elif name == 'int16':
        return short
    elif name == 'int32':
        return _int
    elif name == 'uint64':
        return ulong
    elif name == 'uint32':
        return uint
    elif name == 'float64':
        return float
    elif name in ('bool', 'boolean'):
        # 'bool' is the plain numpy dtype, 'boolean' is the pandas nullable one
        return bool
    elif name.startswith('datetime64[ns'):
        # also matches tz-aware pandas dtypes like 'datetime64[ns, UTC]'
        return nsectime
    elif name.startswith('datetime64[ms'):
        return msectime
    elif name == 'object':
        return str
    elif getattr(t, 'str', '').startswith('<U'):
        # fixed-width unicode dtype: the character count is encoded in `t.str`
        # (e.g. '<U5'), not in `t.name` (which looks like 'str160').
        # getattr: extension dtypes don't necessarily define `.str`.
        length = t.str[2:]
        if length:
            return string[int(length)]
        else:
            return str

    raise ValueError(f'Unknown numpy dtype passed to `np2type`: `{t}`')
|
|
1891
|
+
|
|
1892
|
+
|
|
1857
1893
|
# TODO: move this union of types to some common place
|
|
1858
1894
|
def datetime2expr(
|
|
1859
1895
|
dt_obj: Union[_datetime, _date, pd.Timestamp, date, datetime],
|
onetick/py/utils/__init__.py
CHANGED
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
import time
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
# Stack of currently-active nesting levels; used to indent nested debug output.
INDENTS: list[int] = []


def debug(f):
    """Decorator printing call arguments and wall-clock duration of ``f``.

    Nested decorated calls are printed with increasing indentation.
    The duration line is printed only on successful return.
    """
    def wrapper(*args, **kwargs):
        start = time.time()
        cur_indent = INDENTS[-1] + 1 if INDENTS else 0
        INDENTS.append(cur_indent)
        print(' ' * 4 * cur_indent + f.__name__, 'started', args, kwargs)
        try:
            result = f(*args, **kwargs)
            print(' ' * 4 * cur_indent + f.__name__, 'executed in', time.time() - start)
        finally:
            # pop even when f raises, otherwise the indent stack leaks and
            # all subsequent output stays over-indented
            INDENTS.pop()
        return result
    return wrapper
|
|
@@ -0,0 +1,88 @@
|
|
|
1
|
+
import argparse
|
|
2
|
+
from typing import Any, Dict, Optional
|
|
3
|
+
|
|
4
|
+
import onetick.py as otp
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def parse_args():
    """Build the CLI argument parser for the otq renderer and parse ``sys.argv``."""
    parser = argparse.ArgumentParser(description='Render queries from otq file')

    parser.add_argument('path', help='Path to otq file')
    parser.add_argument(
        '--image-path',
        help='Path to output image file. Default: query.svg in current working directory',
    )
    parser.add_argument('--output-format', help='Output format of output image')

    # remaining options are declared data-driven: (flag, help text, extra kwargs),
    # registered in the same order as before so --help output is unchanged
    option_specs = [
        ('--load-external-otqs', 'Load query dependencies from external otq files',
         {'action': argparse.BooleanOptionalAction}),
        ('--view', 'Show generated image after render',
         {'action': argparse.BooleanOptionalAction}),
        ('--y-limit', 'Limit for maximum number of lines of some EP parameters strings',
         {'type': int}),
        ('--x-limit',
         'Limit for maximum number of characters for each line in text of some EP parameters strings',
         {'type': int}),
        ('--parse-eval-from-params', 'Enable parsing and printing `eval` sub-queries from EP parameters',
         {'action': argparse.BooleanOptionalAction}),
        ('--render-debug-info', 'Render additional debug information',
         {'action': argparse.BooleanOptionalAction}),
        ('--debug', 'Allow to print stdout or stderr from `Graphviz` render',
         {'action': argparse.BooleanOptionalAction}),
        ('--graphviz-compat-mode',
         'Change internal parameters of result graph for better compatibility with old `Graphviz` versions',
         {'action': argparse.BooleanOptionalAction}),
    ]
    for flag, description, extra_kwargs in option_specs:
        parser.add_argument(flag, help=description, **extra_kwargs)

    return parser.parse_args()
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
def render_otq(
    path: str,
    image_path: Optional[str] = None,
    y_limit: Optional[int] = None,
    x_limit: Optional[int] = None,
    **kwargs,
):
    """Render queries from the otq file at *path* via ``otp.utils.render_otq``.

    Raises ``ValueError`` when ``output_format`` is given without an explicit
    ``image_path``, or when only one of ``y_limit``/``x_limit`` is set.
    """
    if image_path is None:
        # a custom output format only makes sense with an explicit output file
        if kwargs.get('output_format'):
            raise ValueError('`image-path` should be specified in order to use parameter `output-format`')
        image_path = 'query.svg'

    call_kwargs: Dict[str, Any] = {'path': path, 'image_path': image_path}

    limits = (y_limit, x_limit)
    if None not in limits:
        call_kwargs['line_limit'] = limits
    elif limits != (None, None):
        raise ValueError('Both `y_limit` and `x_limit` should be set')

    passthrough_params = (
        'output_format', 'load_external_otqs', 'view', 'parse_eval_from_params', 'render_debug_info',
        'debug', 'graphviz_compat_mode',
    )
    call_kwargs.update(
        {name: kwargs[name] for name in passthrough_params if kwargs.get(name) is not None}
    )

    output_path = otp.utils.render_otq(**call_kwargs)
    print(f'Rendered graph saved in {output_path}')
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
def main():
    """Console entry point: parse CLI arguments and render the otq file."""
    cli_args = vars(parse_args())
    render_otq(**cli_args)


if __name__ == '__main__':
    main()
|
|
@@ -1,12 +1,13 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: onetick-py
|
|
3
|
-
Version: 1.
|
|
3
|
+
Version: 1.179.0
|
|
4
4
|
Summary: Python package that allows you to work with OneTick
|
|
5
5
|
Author-email: solutions <solutions@onetick.com>
|
|
6
6
|
License-Expression: MIT
|
|
7
7
|
Project-URL: Documentation, https://docs.pip.distribution.sol.onetick.com
|
|
8
8
|
Project-URL: OneTick, https://onetick.com
|
|
9
9
|
Project-URL: GitHub, https://github.com/onemarketdata/onetick-py
|
|
10
|
+
Project-URL: Issues, https://github.com/onemarketdata/onetick-py/issues
|
|
10
11
|
Classifier: Topic :: Database :: Front-Ends
|
|
11
12
|
Classifier: Topic :: Scientific/Engineering
|
|
12
13
|
Classifier: Programming Language :: Python :: 3.9
|