onetick-py 1.177.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- locator_parser/__init__.py +0 -0
- locator_parser/acl.py +73 -0
- locator_parser/actions.py +262 -0
- locator_parser/common.py +368 -0
- locator_parser/io.py +43 -0
- locator_parser/locator.py +150 -0
- onetick/__init__.py +101 -0
- onetick/doc_utilities/__init__.py +3 -0
- onetick/doc_utilities/napoleon.py +40 -0
- onetick/doc_utilities/ot_doctest.py +140 -0
- onetick/doc_utilities/snippets.py +279 -0
- onetick/lib/__init__.py +4 -0
- onetick/lib/instance.py +141 -0
- onetick/py/__init__.py +293 -0
- onetick/py/_stack_info.py +89 -0
- onetick/py/_version.py +2 -0
- onetick/py/aggregations/__init__.py +11 -0
- onetick/py/aggregations/_base.py +648 -0
- onetick/py/aggregations/_docs.py +948 -0
- onetick/py/aggregations/compute.py +286 -0
- onetick/py/aggregations/functions.py +2216 -0
- onetick/py/aggregations/generic.py +104 -0
- onetick/py/aggregations/high_low.py +80 -0
- onetick/py/aggregations/num_distinct.py +83 -0
- onetick/py/aggregations/order_book.py +501 -0
- onetick/py/aggregations/other.py +1014 -0
- onetick/py/backports.py +26 -0
- onetick/py/cache.py +374 -0
- onetick/py/callback/__init__.py +5 -0
- onetick/py/callback/callback.py +276 -0
- onetick/py/callback/callbacks.py +131 -0
- onetick/py/compatibility.py +798 -0
- onetick/py/configuration.py +771 -0
- onetick/py/core/__init__.py +0 -0
- onetick/py/core/_csv_inspector.py +93 -0
- onetick/py/core/_internal/__init__.py +0 -0
- onetick/py/core/_internal/_manually_bound_value.py +6 -0
- onetick/py/core/_internal/_nodes_history.py +250 -0
- onetick/py/core/_internal/_op_utils/__init__.py +0 -0
- onetick/py/core/_internal/_op_utils/every_operand.py +9 -0
- onetick/py/core/_internal/_op_utils/is_const.py +10 -0
- onetick/py/core/_internal/_per_tick_scripts/tick_list_sort_template.script +121 -0
- onetick/py/core/_internal/_proxy_node.py +140 -0
- onetick/py/core/_internal/_state_objects.py +2312 -0
- onetick/py/core/_internal/_state_vars.py +93 -0
- onetick/py/core/_source/__init__.py +0 -0
- onetick/py/core/_source/_symbol_param.py +95 -0
- onetick/py/core/_source/schema.py +97 -0
- onetick/py/core/_source/source_methods/__init__.py +0 -0
- onetick/py/core/_source/source_methods/aggregations.py +809 -0
- onetick/py/core/_source/source_methods/applyers.py +296 -0
- onetick/py/core/_source/source_methods/columns.py +141 -0
- onetick/py/core/_source/source_methods/data_quality.py +301 -0
- onetick/py/core/_source/source_methods/debugs.py +272 -0
- onetick/py/core/_source/source_methods/drops.py +120 -0
- onetick/py/core/_source/source_methods/fields.py +619 -0
- onetick/py/core/_source/source_methods/filters.py +1002 -0
- onetick/py/core/_source/source_methods/joins.py +1413 -0
- onetick/py/core/_source/source_methods/merges.py +605 -0
- onetick/py/core/_source/source_methods/misc.py +1455 -0
- onetick/py/core/_source/source_methods/pandases.py +155 -0
- onetick/py/core/_source/source_methods/renames.py +356 -0
- onetick/py/core/_source/source_methods/sorts.py +183 -0
- onetick/py/core/_source/source_methods/switches.py +142 -0
- onetick/py/core/_source/source_methods/symbols.py +117 -0
- onetick/py/core/_source/source_methods/times.py +627 -0
- onetick/py/core/_source/source_methods/writes.py +986 -0
- onetick/py/core/_source/symbol.py +205 -0
- onetick/py/core/_source/tmp_otq.py +222 -0
- onetick/py/core/column.py +209 -0
- onetick/py/core/column_operations/__init__.py +0 -0
- onetick/py/core/column_operations/_methods/__init__.py +4 -0
- onetick/py/core/column_operations/_methods/_internal.py +28 -0
- onetick/py/core/column_operations/_methods/conversions.py +216 -0
- onetick/py/core/column_operations/_methods/methods.py +292 -0
- onetick/py/core/column_operations/_methods/op_types.py +160 -0
- onetick/py/core/column_operations/accessors/__init__.py +0 -0
- onetick/py/core/column_operations/accessors/_accessor.py +28 -0
- onetick/py/core/column_operations/accessors/decimal_accessor.py +104 -0
- onetick/py/core/column_operations/accessors/dt_accessor.py +537 -0
- onetick/py/core/column_operations/accessors/float_accessor.py +184 -0
- onetick/py/core/column_operations/accessors/str_accessor.py +1367 -0
- onetick/py/core/column_operations/base.py +1121 -0
- onetick/py/core/cut_builder.py +150 -0
- onetick/py/core/db_constants.py +20 -0
- onetick/py/core/eval_query.py +245 -0
- onetick/py/core/lambda_object.py +441 -0
- onetick/py/core/multi_output_source.py +232 -0
- onetick/py/core/per_tick_script.py +2256 -0
- onetick/py/core/query_inspector.py +464 -0
- onetick/py/core/source.py +1744 -0
- onetick/py/db/__init__.py +2 -0
- onetick/py/db/_inspection.py +1128 -0
- onetick/py/db/db.py +1327 -0
- onetick/py/db/utils.py +64 -0
- onetick/py/docs/__init__.py +0 -0
- onetick/py/docs/docstring_parser.py +112 -0
- onetick/py/docs/utils.py +81 -0
- onetick/py/functions.py +2398 -0
- onetick/py/license.py +190 -0
- onetick/py/log.py +88 -0
- onetick/py/math.py +935 -0
- onetick/py/misc.py +470 -0
- onetick/py/oqd/__init__.py +22 -0
- onetick/py/oqd/eps.py +1195 -0
- onetick/py/oqd/sources.py +325 -0
- onetick/py/otq.py +216 -0
- onetick/py/pyomd_mock.py +47 -0
- onetick/py/run.py +916 -0
- onetick/py/servers.py +173 -0
- onetick/py/session.py +1347 -0
- onetick/py/sources/__init__.py +19 -0
- onetick/py/sources/cache.py +167 -0
- onetick/py/sources/common.py +128 -0
- onetick/py/sources/csv.py +642 -0
- onetick/py/sources/custom.py +85 -0
- onetick/py/sources/data_file.py +305 -0
- onetick/py/sources/data_source.py +1045 -0
- onetick/py/sources/empty.py +94 -0
- onetick/py/sources/odbc.py +337 -0
- onetick/py/sources/order_book.py +271 -0
- onetick/py/sources/parquet.py +168 -0
- onetick/py/sources/pit.py +191 -0
- onetick/py/sources/query.py +495 -0
- onetick/py/sources/snapshots.py +419 -0
- onetick/py/sources/split_query_output_by_symbol.py +198 -0
- onetick/py/sources/symbology_mapping.py +123 -0
- onetick/py/sources/symbols.py +374 -0
- onetick/py/sources/ticks.py +825 -0
- onetick/py/sql.py +70 -0
- onetick/py/state.py +251 -0
- onetick/py/types.py +2131 -0
- onetick/py/utils/__init__.py +70 -0
- onetick/py/utils/acl.py +93 -0
- onetick/py/utils/config.py +186 -0
- onetick/py/utils/default.py +49 -0
- onetick/py/utils/file.py +38 -0
- onetick/py/utils/helpers.py +76 -0
- onetick/py/utils/locator.py +94 -0
- onetick/py/utils/perf.py +498 -0
- onetick/py/utils/query.py +49 -0
- onetick/py/utils/render.py +1374 -0
- onetick/py/utils/script.py +244 -0
- onetick/py/utils/temp.py +471 -0
- onetick/py/utils/types.py +120 -0
- onetick/py/utils/tz.py +84 -0
- onetick_py-1.177.0.dist-info/METADATA +137 -0
- onetick_py-1.177.0.dist-info/RECORD +152 -0
- onetick_py-1.177.0.dist-info/WHEEL +5 -0
- onetick_py-1.177.0.dist-info/entry_points.txt +2 -0
- onetick_py-1.177.0.dist-info/licenses/LICENSE +21 -0
- onetick_py-1.177.0.dist-info/top_level.txt +2 -0
|
@@ -0,0 +1,825 @@
|
|
|
1
|
+
import datetime as dt
|
|
2
|
+
import inspect
|
|
3
|
+
import sys
|
|
4
|
+
import warnings
|
|
5
|
+
import math
|
|
6
|
+
|
|
7
|
+
from typing import Optional, Union, Type, Sequence
|
|
8
|
+
|
|
9
|
+
import onetick.py as otp
|
|
10
|
+
from onetick.py.otq import otq
|
|
11
|
+
import pandas as pd
|
|
12
|
+
|
|
13
|
+
import onetick.py.core._source
|
|
14
|
+
import onetick.py.functions
|
|
15
|
+
import onetick.py.db._inspection
|
|
16
|
+
from onetick.py.core.column import _Column
|
|
17
|
+
from onetick.py.core.source import Source
|
|
18
|
+
|
|
19
|
+
from .. import types as ott
|
|
20
|
+
from .. import utils, configuration
|
|
21
|
+
from ..core.column_operations._methods.methods import is_arithmetical
|
|
22
|
+
from ..core.column_operations.base import _Operation
|
|
23
|
+
from ..compatibility import is_supported_bucket_units_for_tick_generator
|
|
24
|
+
from onetick.py.aggregations._base import get_bucket_interval_from_datepart
|
|
25
|
+
|
|
26
|
+
from ..aggregations._docs import _bucket_time_doc
|
|
27
|
+
from onetick.py.docs.utils import docstring
|
|
28
|
+
|
|
29
|
+
from .common import get_start_end_by_date, update_node_tick_type, AdaptiveTickType
|
|
30
|
+
from .empty import Empty
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def get_case_expr_with_datetime_limited_by_end_time(dt_expr: str) -> str:
    """Wrap a OneTick datetime expression so it never exceeds ``_END_TIME``.

    Builds a ``CASE`` expression: when ``dt_expr`` evaluates past the query
    end time, ``_END_TIME`` is used instead; otherwise ``dt_expr`` itself.

    Parameters
    ----------
    dt_expr: str
        A OneTick expression that evaluates to a datetime.

    Returns
    -------
    str
        The clamped ``CASE(...)`` expression text.
    """
    prefix = 'CASE(' + dt_expr + ' > _END_TIME, 1, '
    suffix = '_END_TIME, ' + dt_expr + ')'
    return prefix + suffix
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
class Tick(Source):
    """Data source generating one tick per bucket via the TICK_GENERATOR EP."""

    @docstring(parameters=[_bucket_time_doc], add_self=True)
    def __init__(
        self,
        data: Optional[dict] = None,
        offset=0,
        offset_part='millisecond',
        time: Optional[ott.datetime] = None,
        timezone_for_time=None,
        symbol=utils.adaptive_to_default,
        db=utils.adaptive_to_default,
        start=utils.adaptive,
        end=utils.adaptive,
        date=None,
        tick_type: Optional[AdaptiveTickType] = utils.adaptive,
        bucket_time: str = "start",
        bucket_interval: int = 0,
        bucket_units: Union[str, Type[utils.adaptive]] = utils.adaptive,
        num_ticks_per_timestamp: int = 1,
        **kwargs,
    ):
        """
        Generates a single tick for each bucket.
        By default a single tick for the whole query time interval is generated.

        Parameters
        ----------
        data: dict
            dictionary of columns names with their values.
            If specified, then parameter ``kwargs`` can't be used.
        offset: int, :ref:`datetime offset <datetime_offsets>`,\
            :py:class:`otp.timedelta <onetick.py.timedelta>`, default=0
            tick timestamp offset from query start time in `offset_part`
        offset_part: one of [nanosecond, millisecond, second, minute, hour, day, dayofyear, weekday, week, month, quarter, year], default=millisecond #noqa
            unit of time to calculate ``offset`` from.
            Could be omitted if :ref:`datetime offset <datetime_offsets>` or
            :py:class:`otp.timedelta <onetick.py.timedelta>` objects are set as ``offset``.
        time: :py:class:`otp.datetime <onetick.py.datetime>`
            fixed time to set to all ticks.
            Note that this time should be inside time interval set by ``start`` and ``end`` parameters
            or by query time range.
        timezone_for_time: str
            timezone of the ``time``
        symbol: str, list of str, :class:`Source`, :class:`query`, :py:func:`eval query <onetick.py.eval>`
            Symbol(s) from which data should be taken.
        db: str
            Database to use for tick generation
        start: :py:class:`otp.datetime <onetick.py.datetime>`
            start time for tick generation. By default the start time of the query will be used.
        end: :py:class:`otp.datetime <onetick.py.datetime>`
            end time for tick generation. By default the end time of the query will be used.
        date: :py:class:`otp.datetime <onetick.py.datetime>` – allows to specify a whole day
            instead of passing explicitly start and end parameters. If it is set along with
            the start and end parameters then last two are ignored.
        tick_type: str
            By default, the tick type value is not significant, and a placeholder string constant will be utilized.
            If you prefer to use the sink node's tick type instead of specifying your own,
            you can set the value to None.
        bucket_interval: int or :ref:`datetime offset objects <datetime_offsets>`
            Determines the length of each bucket (units depends on ``bucket_units``)
            for which the tick will be generated.

            Bucket interval can also be set via :ref:`datetime offset objects <datetime_offsets>`
            like :py:func:`otp.Second <onetick.py.Second>`, :py:func:`otp.Minute <onetick.py.Minute>`,
            :py:func:`otp.Hour <onetick.py.Hour>`, :py:func:`otp.Day <onetick.py.Day>`,
            :py:func:`otp.Month <onetick.py.Month>`.
            In this case you could omit setting ``bucket_units`` parameter.
        bucket_units: 'seconds', 'days' or 'months'
            Unit for value in ``bucket_interval``.
            Default is 'seconds'.
        num_ticks_per_timestamp: int
            The number of ticks to generate for every value of timestamp.
        kwargs:
            dictionary of columns names with their values.
            If specified, then parameter ``data`` can't be used.

        See also
        --------
        | **TICK_GENERATOR** OneTick event processor
        | :py:class:`otp.Ticks <onetick.py.Ticks>`

        Examples
        --------

        Simple usage, generate single tick:

        >>> t = otp.Tick(A=1, B='string', C=3.14, D=otp.dt(2000, 1, 1, 1, 1, 1, 1))
        >>> otp.run(t)
                Time  A       B     C                          D
        0 2003-12-01  1  string  3.14 2000-01-01 01:01:01.000001

        Generate single tick with offset:

        >>> t = otp.Tick(A=1, offset=otp.Minute(10))
        >>> otp.run(t)
                         Time  A
        0 2003-12-01 00:10:00  1

        Generate one tick for each day in a week:

        >>> t = otp.Tick(A=1, start=otp.dt(2023, 1, 1), end=otp.dt(2023, 1, 8), bucket_interval=24 * 60 * 60)
        >>> otp.run(t)
                Time  A
        0 2023-01-01  1
        1 2023-01-02  1
        2 2023-01-03  1
        3 2023-01-04  1
        4 2023-01-05  1
        5 2023-01-06  1
        6 2023-01-07  1

        Generate tick every hour and add 1 minute offset to ticks' timestamps:

        >>> t = otp.Tick(A=1, offset=1, offset_part='minute', bucket_interval=60 * 60)
        >>> t.head(5)
                         Time  A
        0 2003-12-01 00:01:00  1
        1 2003-12-01 01:01:00  1
        2 2003-12-01 02:01:00  1
        3 2003-12-01 03:01:00  1
        4 2003-12-01 04:01:00  1

        Generate tick every hour and set fixed time:

        >>> t = otp.Tick(A=1, time=otp.dt(2023, 1, 2, 3, 4, 5, 6), bucket_interval=60 * 60,
        ...              start=otp.dt(2023, 1, 1), end=otp.dt(2023, 1, 8))
        >>> t.head(5)
                                Time  A
        0 2023-01-02 03:04:05.000006  1
        1 2023-01-02 03:04:05.000006  1
        2 2023-01-02 03:04:05.000006  1
        3 2023-01-02 03:04:05.000006  1
        4 2023-01-02 03:04:05.000006  1

        Use :ref:`datetime offset object <datetime_offsets>` as a ``bucket_interval``:

        .. testcode::
           :skipif: not is_supported_bucket_units_for_tick_generator()

           t = otp.Tick(A=1, bucket_interval=otp.Day(1))
           df = otp.run(t, start=otp.dt(2023, 1, 1), end=otp.dt(2023, 1, 5))
           print(df)

        .. testoutput::

                   Time  A
           0 2023-01-01  1
           1 2023-01-02  1
           2 2023-01-03  1
           3 2023-01-04  1
        """

        # Internal framework path: presumably _try_default_constructor (defined on
        # Source, not visible here) handles copy/clone construction — TODO confirm.
        if self._try_default_constructor(**kwargs):
            return

        if data and kwargs:
            raise ValueError("Parameters 'data' and **kwargs can't be used at the same time")

        if data:
            kwargs = data

        if len(kwargs) == 0:
            raise ValueError("It is not allowed to have a tick without fields")

        # Normalize 'offset' to a (value, datepart) pair when it is given as a
        # datetime-offset object (e.g. otp.Minute(10)); validate in that case.
        if isinstance(offset, ott.OTPBaseTimeOffset):
            offset, offset_part = offset.get_offset()

            if offset < 0:
                raise ValueError("Negative offset not allowed")

            if offset_part not in {
                "nanosecond", "millisecond", "second", "minute", "hour", "day", "week", "month", "quarter", "year"
            }:
                raise ValueError(f"Unsupported DatePart passed to offset: {offset_part}")
        elif isinstance(offset, ott.timedelta):
            # otp.timedelta converts itself into a (value, datepart) pair
            offset, offset_part = offset._get_offset()

        if time is not None and offset != 0:
            raise ValueError("It's not allowed to set parameter 'time' and set non-zero offset at the same time")

        # Translate the public 'start'/'end' values into BUCKET_START/BUCKET_END
        # (deprecated pass-through values trigger a FutureWarning).
        bucket_time = self._get_bucket_time(bucket_time)
        if bucket_time == "BUCKET_END" and offset != 0:
            raise ValueError(
                "It's not allowed to set parameter 'bucket_time' to 'end' and set non-zero offset at the same time"
            )

        # A datetime-offset bucket interval implies the bucket units too.
        if isinstance(bucket_interval, ott.OTPBaseTimeOffset):
            bucket_interval, bucket_units = get_bucket_interval_from_datepart(bucket_interval)

        if date:
            # 'date' silently overrides explicitly passed 'start'/'end'
            # TODO: write a warning in that case
            start, end = get_start_end_by_date(date)

        # Build the schema: map each field name to its onetick.py type.
        columns = {}
        for key, value in kwargs.items():
            # the way to skip a field
            if value is None:
                continue

            if inspect.isclass(value):
                raise TypeError(f"Tick constructor expects values but not types, {value}")
            else:
                value_type = ott.get_object_type(value)

            if value_type is str:
                if isinstance(value, _Operation) or is_arithmetical(value):
                    # operations carry their own dtype; trust it over the default str
                    if value.dtype is not str:
                        value_type = value.dtype
                elif len(value) > ott.string.DEFAULT_LENGTH:
                    # literals longer than the default need an explicitly sized string type
                    value_type = ott.string[len(value)]

            # booleans are stored as floats in the resulting schema
            if value_type is bool:
                value_type = float

            # any datetime-like value is stored as a nanosecond timestamp
            if issubclass(value_type, (ott.datetime, ott.date, dt.datetime, dt.date, pd.Timestamp)):
                value_type = ott.nsectime

            columns[key] = value_type

        super().__init__(
            _symbols=symbol,
            _start=start,
            _end=end,
            # the EP graph is built lazily; all parameters are captured in the closure
            _base_ep_func=lambda: self.base_ep(db=db,
                                               tick_type=tick_type,
                                               offset=offset,
                                               offset_part=offset_part,
                                               time=time,
                                               timezone_for_time=timezone_for_time,
                                               columns=columns,
                                               bucket_time=bucket_time,
                                               bucket_interval=bucket_interval,
                                               bucket_units=bucket_units,
                                               num_ticks_per_timestamp=num_ticks_per_timestamp,
                                               **kwargs),
            schema=columns,
        )

    def base_ep(self,
                db=utils.adaptive_to_default,
                tick_type=utils.adaptive,
                offset=0,
                offset_part='millisecond',
                time=None,
                timezone_for_time=None,
                columns=None,
                bucket_time="start",
                bucket_interval=0,
                bucket_units=utils.adaptive,
                num_ticks_per_timestamp=1,
                **kwargs):
        """Build the underlying TICK_GENERATOR node for this source.

        Returns a :class:`Source` wrapping ``otq.TickGenerator``, with an
        optional UPDATE_FIELD node appended to shift (``offset``) or pin
        (``time``) the tick TIMESTAMP.
        """
        if columns is None:
            columns = {}

        # TICK_GENERATOR 'fields' format: comma-separated "<type> <name>=<value>" entries
        params = ",".join(
            ott.type2str(columns[key]) + " " + str(key) + "=" + ott.value2str(value)
            for key, value in kwargs.items()
            if value is not None
        )

        tick_generator_kwargs = {}
        if bucket_units is not utils.adaptive:
            # bucket_interval_units is only supported on newer OneTick versions;
            # 'seconds' is the implicit default, so only other values must fail.
            if is_supported_bucket_units_for_tick_generator(throw_warning=True):
                tick_generator_kwargs['bucket_interval_units'] = bucket_units.upper()
            elif bucket_units != 'seconds':
                raise ValueError("Parameter 'bucket_units' in otp.Tick is not supported on this OneTick version")

        src = Source(
            otq.TickGenerator(
                bucket_interval=bucket_interval,
                bucket_time=bucket_time,
                fields=params,
                num_ticks_per_timestamp=num_ticks_per_timestamp,
                **tick_generator_kwargs,
            ),
            schema=columns,
        )

        update_node_tick_type(src, tick_type, db)

        # TIMESTAMP += offset will add redundant nodes to sort the timestamps.
        # No sorting needed for a single tick.

        if offset:
            # dateadd by (offset, offset_part), clamped so timestamps never pass _END_TIME
            src.sink(
                otq.UpdateField(
                    field="TIMESTAMP",
                    value=get_case_expr_with_datetime_limited_by_end_time(
                        f"dateadd('{offset_part}', {offset}, TIMESTAMP, _TIMEZONE)"
                    )
                )
            )
        elif time:
            # NOTE(review): truthiness check — assumes a provided 'time' is always
            # truthy; confirm falsy datetime values cannot reach this point.
            src.sink(otq.UpdateField(field="TIMESTAMP",
                                     value=ott.datetime2expr(time, timezone_naive=timezone_for_time)))
        return src

    @staticmethod
    def _get_bucket_time(bucket_time):
        """Translate user-facing 'start'/'end' into BUCKET_START/BUCKET_END.

        The raw BUCKET_START/BUCKET_END values are accepted for backward
        compatibility but emit a FutureWarning; anything else raises ValueError.
        """
        if bucket_time == "BUCKET_START":
            warnings.warn("BUCKET_START value is deprecated. Please, use 'start' instead", FutureWarning)
        elif bucket_time == "BUCKET_END":
            warnings.warn("BUCKET_END value is deprecated. Please, use 'end' instead", FutureWarning)
        elif bucket_time == "start":
            bucket_time = "BUCKET_START"
        elif bucket_time == "end":
            bucket_time = "BUCKET_END"
        else:
            raise ValueError(f"Only 'start' and 'end' values supported as bucket time, but you've passed {bucket_time}")
        return bucket_time
|
|
348
|
+
|
|
349
|
+
|
|
350
|
+
def Ticks(data=None, # NOSONAR
|
|
351
|
+
symbol=utils.adaptive_to_default,
|
|
352
|
+
db=utils.adaptive_to_default,
|
|
353
|
+
start=utils.adaptive,
|
|
354
|
+
end=utils.adaptive,
|
|
355
|
+
tick_type: Optional[AdaptiveTickType] = utils.adaptive,
|
|
356
|
+
timezone_for_time=None,
|
|
357
|
+
offset=utils.adaptive,
|
|
358
|
+
**inplace_data):
|
|
359
|
+
"""
|
|
360
|
+
Data source that generates ticks.
|
|
361
|
+
|
|
362
|
+
By default ticks are placed with the 1 millisecond offset from
|
|
363
|
+
each other starting from the start of the query interval.
|
|
364
|
+
|
|
365
|
+
The offset for each tick can be changed using the
|
|
366
|
+
special reserved field name ``offset``, that specifies the time offset from the query start time.
|
|
367
|
+
``offset`` can be an integer, :ref:`datetime offset <datetime_offsets>` object
|
|
368
|
+
or :py:class:`otp.timedelta <onetick.py.timedelta>`.
|
|
369
|
+
|
|
370
|
+
Parameters
|
|
371
|
+
----------
|
|
372
|
+
data: dict, list or :pandas:`pandas.DataFrame`, optional
|
|
373
|
+
Ticks values
|
|
374
|
+
|
|
375
|
+
* ``dict`` -- <field_name>: <values>
|
|
376
|
+
|
|
377
|
+
* ``list`` -- [[<field_names>], [<first_tick_values>], ..., [<n_tick_values>]]
|
|
378
|
+
|
|
379
|
+
* :pandas:`DataFrame <pandas.DataFrame>`
|
|
380
|
+
|
|
381
|
+
* ``None`` -- ``inplace_data`` will be used
|
|
382
|
+
|
|
383
|
+
symbol: str, list of str, :class:`Source`, :class:`query`, :py:func:`eval query <onetick.py.eval>`
|
|
384
|
+
Symbol(s) from which data should be taken.
|
|
385
|
+
db: str
|
|
386
|
+
Database to use for tick generation
|
|
387
|
+
start, end: :py:class:`datetime.datetime`, :py:class:`otp.datetime <onetick.py.datetime>`, \
|
|
388
|
+
:py:class:`onetick.py.adaptive`
|
|
389
|
+
Timestamp for data generation
|
|
390
|
+
tick_type: str
|
|
391
|
+
tick type for data generation
|
|
392
|
+
timezone_for_time: str
|
|
393
|
+
timezone for data generation
|
|
394
|
+
offset: int, :ref:`datetime offset <datetime_offsets>` or :py:class:`otp.timedelta <onetick.py.timedelta>` \
|
|
395
|
+
or list of such values or None
|
|
396
|
+
Specifies the time offset for each tick from the query start time.
|
|
397
|
+
Should be specified as the list of values, one for each tick,
|
|
398
|
+
or as a single value that will be the same for all ticks.
|
|
399
|
+
Special value None will disable changing timestamps for each tick,
|
|
400
|
+
so all timestamps will be set to the query start time.
|
|
401
|
+
Can't be used at the same time with the column `offset`.
|
|
402
|
+
**inplace_data: list
|
|
403
|
+
<field_name>: list(<field_values>)
|
|
404
|
+
|
|
405
|
+
See also
|
|
406
|
+
--------
|
|
407
|
+
| **TICK_GENERATOR** OneTick event processor
|
|
408
|
+
| **CSV_FILE_LISTING** OneTick event processor
|
|
409
|
+
| :py:class:`otp.Tick <onetick.py.Tick>`
|
|
410
|
+
|
|
411
|
+
Examples
|
|
412
|
+
--------
|
|
413
|
+
|
|
414
|
+
Pass the data as a dictionary:
|
|
415
|
+
|
|
416
|
+
>>> d = otp.Ticks({'A': [1, 2, 3], 'B': [4, 5, 6]})
|
|
417
|
+
>>> otp.run(d)
|
|
418
|
+
Time A B
|
|
419
|
+
0 2003-12-01 00:00:00.000 1 4
|
|
420
|
+
1 2003-12-01 00:00:00.001 2 5
|
|
421
|
+
2 2003-12-01 00:00:00.002 3 6
|
|
422
|
+
|
|
423
|
+
Pass the data using key-value arguments:
|
|
424
|
+
|
|
425
|
+
>>> d = otp.Ticks(A=[1, 2, 3], B=[4, 5, 6])
|
|
426
|
+
>>> otp.run(d)
|
|
427
|
+
Time A B
|
|
428
|
+
0 2003-12-01 00:00:00.000 1 4
|
|
429
|
+
1 2003-12-01 00:00:00.001 2 5
|
|
430
|
+
2 2003-12-01 00:00:00.002 3 6
|
|
431
|
+
|
|
432
|
+
Pass the data using list:
|
|
433
|
+
|
|
434
|
+
>>> d = otp.Ticks([['A', 'B'],
|
|
435
|
+
... [1, 4],
|
|
436
|
+
... [2, 5],
|
|
437
|
+
... [3, 6]])
|
|
438
|
+
>>> otp.run(d)
|
|
439
|
+
Time A B
|
|
440
|
+
0 2003-12-01 00:00:00.000 1 4
|
|
441
|
+
1 2003-12-01 00:00:00.001 2 5
|
|
442
|
+
2 2003-12-01 00:00:00.002 3 6
|
|
443
|
+
|
|
444
|
+
Pass the data using :pandas:`pandas.DataFrame`.
|
|
445
|
+
DataFrame should have a ``Time`` column containing datetime objects.
|
|
446
|
+
|
|
447
|
+
>>> start_datetime = datetime.datetime(2023, 1, 1, 12)
|
|
448
|
+
>>> time_array = [start_datetime + otp.Hour(1) + otp.Nano(1)]
|
|
449
|
+
>>> a_array = [start_datetime - otp.Day(15) - otp.Nano(7)]
|
|
450
|
+
>>> df = pd.DataFrame({'Time': time_array,'A': a_array})
|
|
451
|
+
>>> data = otp.Ticks(df)
|
|
452
|
+
>>> otp.run(data, start=start_datetime, end=start_datetime + otp.Day(1))
|
|
453
|
+
Time A
|
|
454
|
+
0 2023-01-01 13:00:00.000000001 2022-12-17 11:59:59.999999993
|
|
455
|
+
|
|
456
|
+
Example with setting ``offset`` for each tick:
|
|
457
|
+
|
|
458
|
+
>>> data = otp.Ticks(X=[1, 2, 3], offset=[0, otp.Nano(1), 1])
|
|
459
|
+
>>> otp.run(data)
|
|
460
|
+
Time X
|
|
461
|
+
0 2003-12-01 00:00:00.000000000 1
|
|
462
|
+
1 2003-12-01 00:00:00.000000001 2
|
|
463
|
+
2 2003-12-01 00:00:00.001000000 3
|
|
464
|
+
|
|
465
|
+
Remove the ``offset`` for all ticks, in this case the timestamp of each tick is set to the start time of the query:
|
|
466
|
+
|
|
467
|
+
>>> data = otp.Ticks(X=[1, 2, 3], offset=None)
|
|
468
|
+
>>> otp.run(data)
|
|
469
|
+
Time X
|
|
470
|
+
0 2003-12-01 1
|
|
471
|
+
1 2003-12-01 2
|
|
472
|
+
2 2003-12-01 3
|
|
473
|
+
|
|
474
|
+
Parameter ``offset`` allows to set the same value for all ticks:
|
|
475
|
+
|
|
476
|
+
>>> data = otp.Ticks(X=[1, 2, 3], offset=otp.Nano(13))
|
|
477
|
+
>>> otp.run(data)
|
|
478
|
+
Time X
|
|
479
|
+
0 2003-12-01 00:00:00.000000013 1
|
|
480
|
+
1 2003-12-01 00:00:00.000000013 2
|
|
481
|
+
2 2003-12-01 00:00:00.000000013 3
|
|
482
|
+
"""
|
|
483
|
+
|
|
484
|
+
if db is utils.adaptive_to_default:
|
|
485
|
+
db = configuration.config.get('default_db')
|
|
486
|
+
|
|
487
|
+
if isinstance(data, pd.DataFrame):
|
|
488
|
+
if offset is not utils.adaptive:
|
|
489
|
+
raise ValueError("Parameter 'offset' can't be set when passing pandas.DataFrame.")
|
|
490
|
+
if data.empty:
|
|
491
|
+
warnings.warn('otp.Ticks got empty DataFrame as input, returning otp.Empty', stacklevel=2)
|
|
492
|
+
return Empty(db=db, symbol=symbol, tick_type=tick_type, start=start, end=end)
|
|
493
|
+
if 'Time' not in data.columns:
|
|
494
|
+
raise ValueError('Field `Time` is required for constructing an `otp.Source` from `pandas.DataFrame`')
|
|
495
|
+
data = data.rename(columns={"Time": "time"})
|
|
496
|
+
# to_dict('list') doesn't work correctly with pandas timestamps on some versions
|
|
497
|
+
data = data.to_dict('series')
|
|
498
|
+
data = {column_name: series.to_list() for column_name, series in data.items()}
|
|
499
|
+
|
|
500
|
+
if data and len(inplace_data) != 0:
|
|
501
|
+
raise ValueError("Data can be passed only using either the `data` parameter "
|
|
502
|
+
"or inplace through the key-value args")
|
|
503
|
+
|
|
504
|
+
if isinstance(data, list):
|
|
505
|
+
reform = {}
|
|
506
|
+
for inx, key in enumerate(data[0]):
|
|
507
|
+
reform[key] = [sub_list[inx] for sub_list in data[1:]]
|
|
508
|
+
|
|
509
|
+
data = reform
|
|
510
|
+
|
|
511
|
+
if data is None:
|
|
512
|
+
if inplace_data:
|
|
513
|
+
data = inplace_data
|
|
514
|
+
else:
|
|
515
|
+
raise ValueError("You don't specify any date to create ticks from. "
|
|
516
|
+
"Please, use otp.Empty for creating empty data source")
|
|
517
|
+
else:
|
|
518
|
+
data = data.copy()
|
|
519
|
+
|
|
520
|
+
def check_value_len(_data):
|
|
521
|
+
# check all columns to have the same number of rows
|
|
522
|
+
value_len = None
|
|
523
|
+
for key, value in _data.items():
|
|
524
|
+
if value_len is None:
|
|
525
|
+
value_len = len(value)
|
|
526
|
+
elif value_len != len(value):
|
|
527
|
+
raise ValueError(
|
|
528
|
+
f"It is not allowed to have different columns of different lengths, "
|
|
529
|
+
f"some of columns have {value_len} length, but column '{key}', as instance, has {len(value)}"
|
|
530
|
+
)
|
|
531
|
+
return value_len
|
|
532
|
+
|
|
533
|
+
value_len = check_value_len(data)
|
|
534
|
+
|
|
535
|
+
use_absolute_time = False
|
|
536
|
+
|
|
537
|
+
if 'offset' in data:
|
|
538
|
+
if offset is not utils.adaptive:
|
|
539
|
+
raise ValueError("Parameter 'offset' and column 'offset' can't be set at the same time.")
|
|
540
|
+
else:
|
|
541
|
+
offset = data['offset']
|
|
542
|
+
|
|
543
|
+
if offset is not utils.adaptive:
|
|
544
|
+
if "time" in data:
|
|
545
|
+
raise ValueError("You cannot specify 'offset' column/parameter and 'time' column at the same time.")
|
|
546
|
+
elif "time" in data:
|
|
547
|
+
use_absolute_time = True
|
|
548
|
+
else:
|
|
549
|
+
# by default the difference between each offset is 1 (1 millisecond)
|
|
550
|
+
offset = list(range(value_len))
|
|
551
|
+
|
|
552
|
+
disable_offsets = False
|
|
553
|
+
if offset is None:
|
|
554
|
+
disable_offsets = True
|
|
555
|
+
elif offset is not utils.adaptive and not isinstance(offset, Sequence):
|
|
556
|
+
# if offset is set as a single value then just copy-paste it for all rows
|
|
557
|
+
offset = [offset] * value_len
|
|
558
|
+
|
|
559
|
+
if offset is not utils.adaptive and offset is not None:
|
|
560
|
+
data['offset'] = offset
|
|
561
|
+
check_value_len(data)
|
|
562
|
+
|
|
563
|
+
if not use_absolute_time and not disable_offsets:
|
|
564
|
+
offset_values = []
|
|
565
|
+
offset_parts = []
|
|
566
|
+
for ofv in data['offset']:
|
|
567
|
+
if isinstance(ofv, ott.offsets.Tick):
|
|
568
|
+
offset_values.append(ofv.n)
|
|
569
|
+
try:
|
|
570
|
+
str_repr = str(ofv.datepart)[1:-1]
|
|
571
|
+
except Exception:
|
|
572
|
+
str_repr = str(ofv.base)[1:-1].lower()
|
|
573
|
+
offset_parts.append(str_repr)
|
|
574
|
+
elif isinstance(ofv, ott.timedelta):
|
|
575
|
+
value, str_repr = ofv._get_offset()
|
|
576
|
+
offset_values.append(value)
|
|
577
|
+
offset_parts.append(str_repr)
|
|
578
|
+
else:
|
|
579
|
+
offset_values.append(ofv)
|
|
580
|
+
offset_parts.append('millisecond')
|
|
581
|
+
data['offset'] = offset_values
|
|
582
|
+
data['offset_part'] = offset_parts
|
|
583
|
+
|
|
584
|
+
def split_data_for_tick(columns):
|
|
585
|
+
tick_parameters = {}
|
|
586
|
+
tick_columns = {}
|
|
587
|
+
for key, value in columns.items():
|
|
588
|
+
if key in {'offset', 'offset_part', 'time'}:
|
|
589
|
+
tick_parameters[key] = value
|
|
590
|
+
else:
|
|
591
|
+
tick_columns[key] = value
|
|
592
|
+
return tick_columns, tick_parameters
|
|
593
|
+
|
|
594
|
+
if value_len == 1:
|
|
595
|
+
columns = {key: value[0] for key, value in data.items()}
|
|
596
|
+
tick_columns, tick_parameters = split_data_for_tick(columns)
|
|
597
|
+
return Tick(tick_columns, db=db, symbol=symbol, tick_type=tick_type, start=start, end=end,
|
|
598
|
+
timezone_for_time=timezone_for_time, **tick_parameters)
|
|
599
|
+
else:
|
|
600
|
+
# select only columns that do not contain None there to support
|
|
601
|
+
# heterogeneous data
|
|
602
|
+
not_none_columns = []
|
|
603
|
+
for key in data.keys():
|
|
604
|
+
data[key] = [float(elem) if isinstance(elem, bool) else elem for elem in data[key]]
|
|
605
|
+
for key, value in data.items():
|
|
606
|
+
add = True
|
|
607
|
+
for v in value:
|
|
608
|
+
# we need it, because can't use _Column instances in if-clauses
|
|
609
|
+
if isinstance(v, _Column):
|
|
610
|
+
continue
|
|
611
|
+
if v is None:
|
|
612
|
+
add = False
|
|
613
|
+
break
|
|
614
|
+
|
|
615
|
+
if add:
|
|
616
|
+
not_none_columns.append(key)
|
|
617
|
+
|
|
618
|
+
# if a field is a onetick operation, it cannot be csv'd (it's dynamic)
|
|
619
|
+
is_outside_data_dependent = False
|
|
620
|
+
for key, value in data.items():
|
|
621
|
+
for v in value:
|
|
622
|
+
if isinstance(v, _Operation):
|
|
623
|
+
is_outside_data_dependent = True
|
|
624
|
+
break
|
|
625
|
+
|
|
626
|
+
# infinity() and (on windows) nan() cannot be natively read from a csv
|
|
627
|
+
has_special_values = False
|
|
628
|
+
for key, value in data.items():
|
|
629
|
+
for v in value:
|
|
630
|
+
if isinstance(v, ott._inf) or \
|
|
631
|
+
(isinstance(v, ott._nan) or isinstance(v, float) and math.isnan(v)) \
|
|
632
|
+
and sys.platform.startswith("win"):
|
|
633
|
+
has_special_values = True
|
|
634
|
+
break
|
|
635
|
+
|
|
636
|
+
if (len(not_none_columns) == len(data)) and (not is_outside_data_dependent) and (not has_special_values):
|
|
637
|
+
# Data is homogenous; CSV backing can be used
|
|
638
|
+
return _DataCSV(data, value_len, db=db, symbol=symbol, tick_type=tick_type, start=start, end=end,
|
|
639
|
+
timezone_for_time=timezone_for_time, use_absolute_time=use_absolute_time,
|
|
640
|
+
disable_offsets=disable_offsets)
|
|
641
|
+
else:
|
|
642
|
+
# Fallback is a merge of individual ticks
|
|
643
|
+
ticks = []
|
|
644
|
+
|
|
645
|
+
for inx in range(value_len):
|
|
646
|
+
columns = {key: value[inx] for key, value in data.items()}
|
|
647
|
+
tick_columns, tick_parameters = split_data_for_tick(columns)
|
|
648
|
+
ticks.append(Tick(tick_columns, db=db, symbol=symbol, tick_type=tick_type, start=start, end=end,
|
|
649
|
+
timezone_for_time=timezone_for_time, **tick_parameters))
|
|
650
|
+
|
|
651
|
+
return onetick.py.functions.merge(ticks, align_schema=not_none_columns)
|
|
652
|
+
|
|
653
|
+
|
|
654
|
+
class _DataCSV(Source):
    """
    Internal ``Source`` that materializes in-memory tick data through a CSV
    event processor (``otq.CsvFileListing``).

    ``data`` is a mapping of column name to a list of values; ``length`` is
    the number of rows (every value list is assumed to have this length --
    presumably pre-validated by the caller; TODO confirm).
    The constructor serializes the data into a CSV header/body pair and
    defers graph construction to :meth:`base_ep`.
    """

    def __init__(
        self,
        data=None,
        length=None,
        db=utils.adaptive_to_default,
        symbol=utils.adaptive_to_default,
        tick_type=utils.adaptive,
        start=utils.adaptive,
        end=utils.adaptive,
        use_absolute_time=False,
        timezone_for_time=None,
        disable_offsets=False,
        **kwargs,
    ):
        # Support the framework's alternate construction path (e.g. copying):
        # if it succeeds, this instance is already fully initialized.
        if self._try_default_constructor(**kwargs):
            return

        if data is None or length is None:
            raise ValueError("'data' and 'length' parameters can't be None")

        def datetime_to_expr(v):
            # Convert a single datetime-like value into a OneTick expression string.
            if ott.is_time_type(v):
                return ott.datetime2expr(v, timezone_naive=timezone_for_time)
            if isinstance(v, ott.nsectime):
                # TODO: change to ott.value2str after PY-441
                return f'NSECTIME({v})'
            if isinstance(v, ott.msectime):
                return ott.value2str(v)
            raise ValueError(f"Can't convert value {v} to datetime expression")

        if use_absolute_time:
            # converting values of "time" column to onetick expressions
            converted_times = []
            for d in data["time"]:
                converted_times.append(datetime_to_expr(d))
            data["time"] = converted_times

        def csv_rep(value):
            # CSV cell representation: strings are double-quoted with
            # backslash escaping; everything else uses its str() form.
            if issubclass(type(value), str):
                return '"' + value.replace("\\", "\\\\").replace('"', '\\"') + '"'
            else:
                return str(value)

        def get_type_of_column(key):
            # Derive one common type wide enough for every value in the column.
            def get_type_of_value(value):
                t = ott.get_object_type(value)

                if ott.is_time_type(t):
                    return ott.nsectime
                elif t is str:
                    # short strings map to the default string type, longer
                    # ones to a fixed-length onetick string of this exact size
                    if len(value) <= ott.string.DEFAULT_LENGTH:
                        return str
                    else:
                        return ott.string[len(value)]
                else:
                    return t

            types = [get_type_of_value(v) for v in data[key]]
            res, _ = utils.get_type_that_includes(types)
            return res

        columns = {key: get_type_of_column(key) for key in data}

        # Datetime columns can't be read natively from CSV: their values are
        # replaced with onetick expression strings here, and the column names
        # are remembered so base_ep() can evaluate them back into nsectime.
        expression_columns = []
        header_columns = {}
        for key in list(columns):
            header_columns[key] = columns[key]
            # converting values of datetime columns to onetick expressions
            if columns[key] is ott.nsectime:
                data[key] = [datetime_to_expr(v) for v in data[key]]
                # re-derive the header type of the now-stringified column
                header_columns[key] = get_type_of_column(key)
                expression_columns.append(key)

        # Row-major serialization: one inner list of CSV cells per tick.
        transposed_data = [[csv_rep(value[i]) for key, value in data.items()] for i in range(length)]

        text_header = ",".join(f"{ott.type2str(v)} {k}" for k, v in header_columns.items())
        text_data = "\n".join([",".join(data_row) for data_row in transposed_data])

        # Helper columns are consumed by base_ep() and must not leak into
        # the resulting schema.
        if use_absolute_time:
            del columns["time"]
        elif not disable_offsets:
            del columns["offset"]
            del columns["offset_part"]

        super().__init__(
            _symbols=symbol,
            _start=start,
            _end=end,
            _base_ep_func=lambda: self.base_ep(columns=columns,
                                               db=db,
                                               tick_type=tick_type,
                                               use_absolute_time=use_absolute_time,
                                               text_header=text_header,
                                               text_data=text_data,
                                               expression_columns=expression_columns,
                                               disable_offsets=disable_offsets),
            schema=columns,
        )

    def base_ep(self, columns, db, tick_type, use_absolute_time, text_header, text_data, expression_columns=None,
                disable_offsets=False):
        """
        Build the event-processor graph: read ``text_data`` (fields described
        by ``text_header``) via ``otq.CsvFileListing``, then restore tick
        timestamps either from the absolute "time" column or from the
        "offset"/"offset_part" columns, and finally evaluate any
        datetime-expression columns back into nsectime fields.
        """

        node = Source(
            otq.CsvFileListing(
                discard_timestamp_column=True,
                time_assignment="_START_TIME",
                field_delimiters="','",
                quote_chars='"""',
                handle_escaped_chars=True,
                file_contents=text_data,
                first_line_is_title=False,
                fields=text_header,
            ),
            schema=columns,
        )

        update_node_tick_type(node, tick_type, db)

        if use_absolute_time:
            # don't trust UpdateField
            node.sink(otq.AddField(field='____TMP____', value="EVAL_EXPRESSION(time, 'datetime')"))
            node.sink(otq.UpdateField(field="TIMESTAMP", value="____TMP____"))
            node.sink(otq.Passthrough(fields="time,____TMP____", drop_fields="True"))
            node.sink(otq.OrderBy(order_by="TIMESTAMP ASC"))
        elif not disable_offsets:
            # Shift each tick's TIMESTAMP by its per-row offset; the helper's
            # name indicates the result is limited by the query end time.
            node.sink(
                otq.AddField(
                    field='nsectime ____TMP____',
                    value=get_case_expr_with_datetime_limited_by_end_time(
                        "dateadd(offset_part, offset, TIMESTAMP, _TIMEZONE)"
                    )
                )
            )
            node.sink(otq.OrderBy(order_by="____TMP____ ASC"))
            node.sink(otq.UpdateField(field="TIMESTAMP", value="____TMP____"))
            node.sink(otq.Passthrough(fields="offset,offset_part,____TMP____", drop_fields="True"))
            node.sink(otq.OrderBy(order_by="TIMESTAMP ASC"))

        # Re-evaluate datetime-expression columns into real nsectime values.
        for column in expression_columns or []:
            # don't trust UpdateField
            node.sink(otq.RenameFields(f'{column}=____TMP____'))
            node.sink(otq.AddField(field=column, value="EVAL_EXPRESSION(____TMP____, 'datetime')"))
            node.sink(otq.Passthrough(fields='____TMP____', drop_fields=True))
            node.sink(otq.Table(keep_input_fields=True,
                                fields=', '.join(f'nsectime {column}' for column in expression_columns)))

        return node
|
|
803
|
+
|
|
804
|
+
def TTicks(data):  # NOSONAR
    """
    .. deprecated:: 1.3.101

    Transposed Ticks format.

    Parameters
    ----------
    data: list
        list of list, where the first sublist is the header, and other are values
    """

    warnings.warn("The nice and helpful function `TTicks` is going to be deprecated. "
                  "You could use the `Ticks` to pass data in the same format there",
                  FutureWarning)

    # The first sub-list carries the column names; every following sub-list
    # is one row of values.  Regroup the rows into per-column value lists.
    header, rows = data[0], data[1:]
    columns = {name: [row[position] for row in rows]
               for position, name in enumerate(header)}

    return Ticks(columns)
|