onetick_py-1.162.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (152)
  1. locator_parser/__init__.py +0 -0
  2. locator_parser/acl.py +73 -0
  3. locator_parser/actions.py +266 -0
  4. locator_parser/common.py +365 -0
  5. locator_parser/io.py +41 -0
  6. locator_parser/locator.py +150 -0
  7. onetick/__init__.py +101 -0
  8. onetick/doc_utilities/__init__.py +3 -0
  9. onetick/doc_utilities/napoleon.py +40 -0
  10. onetick/doc_utilities/ot_doctest.py +140 -0
  11. onetick/doc_utilities/snippets.py +280 -0
  12. onetick/lib/__init__.py +4 -0
  13. onetick/lib/instance.py +138 -0
  14. onetick/py/__init__.py +290 -0
  15. onetick/py/_stack_info.py +89 -0
  16. onetick/py/_version.py +2 -0
  17. onetick/py/aggregations/__init__.py +11 -0
  18. onetick/py/aggregations/_base.py +645 -0
  19. onetick/py/aggregations/_docs.py +912 -0
  20. onetick/py/aggregations/compute.py +286 -0
  21. onetick/py/aggregations/functions.py +2216 -0
  22. onetick/py/aggregations/generic.py +104 -0
  23. onetick/py/aggregations/high_low.py +80 -0
  24. onetick/py/aggregations/num_distinct.py +83 -0
  25. onetick/py/aggregations/order_book.py +427 -0
  26. onetick/py/aggregations/other.py +1014 -0
  27. onetick/py/backports.py +26 -0
  28. onetick/py/cache.py +373 -0
  29. onetick/py/callback/__init__.py +5 -0
  30. onetick/py/callback/callback.py +275 -0
  31. onetick/py/callback/callbacks.py +131 -0
  32. onetick/py/compatibility.py +752 -0
  33. onetick/py/configuration.py +736 -0
  34. onetick/py/core/__init__.py +0 -0
  35. onetick/py/core/_csv_inspector.py +93 -0
  36. onetick/py/core/_internal/__init__.py +0 -0
  37. onetick/py/core/_internal/_manually_bound_value.py +6 -0
  38. onetick/py/core/_internal/_nodes_history.py +250 -0
  39. onetick/py/core/_internal/_op_utils/__init__.py +0 -0
  40. onetick/py/core/_internal/_op_utils/every_operand.py +9 -0
  41. onetick/py/core/_internal/_op_utils/is_const.py +10 -0
  42. onetick/py/core/_internal/_per_tick_scripts/tick_list_sort_template.script +121 -0
  43. onetick/py/core/_internal/_proxy_node.py +140 -0
  44. onetick/py/core/_internal/_state_objects.py +2307 -0
  45. onetick/py/core/_internal/_state_vars.py +87 -0
  46. onetick/py/core/_source/__init__.py +0 -0
  47. onetick/py/core/_source/_symbol_param.py +95 -0
  48. onetick/py/core/_source/schema.py +97 -0
  49. onetick/py/core/_source/source_methods/__init__.py +0 -0
  50. onetick/py/core/_source/source_methods/aggregations.py +810 -0
  51. onetick/py/core/_source/source_methods/applyers.py +296 -0
  52. onetick/py/core/_source/source_methods/columns.py +141 -0
  53. onetick/py/core/_source/source_methods/data_quality.py +301 -0
  54. onetick/py/core/_source/source_methods/debugs.py +270 -0
  55. onetick/py/core/_source/source_methods/drops.py +120 -0
  56. onetick/py/core/_source/source_methods/fields.py +619 -0
  57. onetick/py/core/_source/source_methods/filters.py +1001 -0
  58. onetick/py/core/_source/source_methods/joins.py +1393 -0
  59. onetick/py/core/_source/source_methods/merges.py +566 -0
  60. onetick/py/core/_source/source_methods/misc.py +1325 -0
  61. onetick/py/core/_source/source_methods/pandases.py +155 -0
  62. onetick/py/core/_source/source_methods/renames.py +356 -0
  63. onetick/py/core/_source/source_methods/sorts.py +183 -0
  64. onetick/py/core/_source/source_methods/switches.py +142 -0
  65. onetick/py/core/_source/source_methods/symbols.py +117 -0
  66. onetick/py/core/_source/source_methods/times.py +627 -0
  67. onetick/py/core/_source/source_methods/writes.py +702 -0
  68. onetick/py/core/_source/symbol.py +202 -0
  69. onetick/py/core/_source/tmp_otq.py +222 -0
  70. onetick/py/core/column.py +209 -0
  71. onetick/py/core/column_operations/__init__.py +0 -0
  72. onetick/py/core/column_operations/_methods/__init__.py +4 -0
  73. onetick/py/core/column_operations/_methods/_internal.py +28 -0
  74. onetick/py/core/column_operations/_methods/conversions.py +215 -0
  75. onetick/py/core/column_operations/_methods/methods.py +294 -0
  76. onetick/py/core/column_operations/_methods/op_types.py +150 -0
  77. onetick/py/core/column_operations/accessors/__init__.py +0 -0
  78. onetick/py/core/column_operations/accessors/_accessor.py +30 -0
  79. onetick/py/core/column_operations/accessors/decimal_accessor.py +92 -0
  80. onetick/py/core/column_operations/accessors/dt_accessor.py +464 -0
  81. onetick/py/core/column_operations/accessors/float_accessor.py +160 -0
  82. onetick/py/core/column_operations/accessors/str_accessor.py +1374 -0
  83. onetick/py/core/column_operations/base.py +1061 -0
  84. onetick/py/core/cut_builder.py +149 -0
  85. onetick/py/core/db_constants.py +20 -0
  86. onetick/py/core/eval_query.py +244 -0
  87. onetick/py/core/lambda_object.py +442 -0
  88. onetick/py/core/multi_output_source.py +193 -0
  89. onetick/py/core/per_tick_script.py +2253 -0
  90. onetick/py/core/query_inspector.py +465 -0
  91. onetick/py/core/source.py +1663 -0
  92. onetick/py/db/__init__.py +2 -0
  93. onetick/py/db/_inspection.py +1042 -0
  94. onetick/py/db/db.py +1423 -0
  95. onetick/py/db/utils.py +64 -0
  96. onetick/py/docs/__init__.py +0 -0
  97. onetick/py/docs/docstring_parser.py +112 -0
  98. onetick/py/docs/utils.py +81 -0
  99. onetick/py/functions.py +2354 -0
  100. onetick/py/license.py +188 -0
  101. onetick/py/log.py +88 -0
  102. onetick/py/math.py +947 -0
  103. onetick/py/misc.py +437 -0
  104. onetick/py/oqd/__init__.py +22 -0
  105. onetick/py/oqd/eps.py +1195 -0
  106. onetick/py/oqd/sources.py +325 -0
  107. onetick/py/otq.py +211 -0
  108. onetick/py/pyomd_mock.py +47 -0
  109. onetick/py/run.py +841 -0
  110. onetick/py/servers.py +173 -0
  111. onetick/py/session.py +1342 -0
  112. onetick/py/sources/__init__.py +19 -0
  113. onetick/py/sources/cache.py +167 -0
  114. onetick/py/sources/common.py +126 -0
  115. onetick/py/sources/csv.py +642 -0
  116. onetick/py/sources/custom.py +85 -0
  117. onetick/py/sources/data_file.py +305 -0
  118. onetick/py/sources/data_source.py +1049 -0
  119. onetick/py/sources/empty.py +94 -0
  120. onetick/py/sources/odbc.py +337 -0
  121. onetick/py/sources/order_book.py +238 -0
  122. onetick/py/sources/parquet.py +168 -0
  123. onetick/py/sources/pit.py +191 -0
  124. onetick/py/sources/query.py +495 -0
  125. onetick/py/sources/snapshots.py +419 -0
  126. onetick/py/sources/split_query_output_by_symbol.py +198 -0
  127. onetick/py/sources/symbology_mapping.py +123 -0
  128. onetick/py/sources/symbols.py +357 -0
  129. onetick/py/sources/ticks.py +825 -0
  130. onetick/py/sql.py +70 -0
  131. onetick/py/state.py +256 -0
  132. onetick/py/types.py +2056 -0
  133. onetick/py/utils/__init__.py +70 -0
  134. onetick/py/utils/acl.py +93 -0
  135. onetick/py/utils/config.py +186 -0
  136. onetick/py/utils/default.py +49 -0
  137. onetick/py/utils/file.py +38 -0
  138. onetick/py/utils/helpers.py +76 -0
  139. onetick/py/utils/locator.py +94 -0
  140. onetick/py/utils/perf.py +499 -0
  141. onetick/py/utils/query.py +49 -0
  142. onetick/py/utils/render.py +1139 -0
  143. onetick/py/utils/script.py +244 -0
  144. onetick/py/utils/temp.py +471 -0
  145. onetick/py/utils/types.py +118 -0
  146. onetick/py/utils/tz.py +82 -0
  147. onetick_py-1.162.2.dist-info/METADATA +148 -0
  148. onetick_py-1.162.2.dist-info/RECORD +152 -0
  149. onetick_py-1.162.2.dist-info/WHEEL +5 -0
  150. onetick_py-1.162.2.dist-info/entry_points.txt +2 -0
  151. onetick_py-1.162.2.dist-info/licenses/LICENSE +21 -0
  152. onetick_py-1.162.2.dist-info/top_level.txt +2 -0
onetick/py/core/source.py
@@ -0,0 +1,1663 @@
1
+ import os
2
+ import re
3
+ import uuid
4
+ import warnings
5
+ from collections import defaultdict
6
+ from datetime import datetime, date
7
+ import pandas as pd
8
+ from typing import Optional, Tuple
9
+
10
+ from onetick.py.otq import otq
11
+
12
+ import onetick.py.functions
13
+ import onetick.py.sources
14
+ from onetick import py as otp
15
+ from onetick.py import types as ott
16
+ from onetick.py import utils, configuration
17
+ from onetick.py.core._internal._manually_bound_value import _ManuallyBoundValue
18
+ from onetick.py.core._internal._proxy_node import _ProxyNode
19
+ from onetick.py.core._internal._state_vars import StateVars
20
+ from onetick.py.core._source._symbol_param import _SymbolParamColumn, _SymbolParamSource
21
+ from onetick.py.core._source.schema import Schema
22
+ from onetick.py.core._source.symbol import Symbol
23
+ from onetick.py.core._source.tmp_otq import TmpOtq
24
+ from onetick.py.core.column import _Column
25
+ from onetick.py.core.column_operations.base import _Operation, OnetickParameter
26
+ from onetick.py.core.query_inspector import get_query_parameter_list
27
+ from onetick.py.utils import adaptive, adaptive_to_default, default
28
+
29
+
30
+ def _is_dict_required(symbols):
31
+ """
32
+ Depending on symbols, determine if output of otp.run() or Source.__call__() should always be a dictionary
33
+ of {symbol: dataframe} even if only one symbol is present in the results
34
+ """
35
+ if isinstance(symbols, (list, tuple)):
36
+ if len(symbols) == 0:
37
+ return False
38
+ elif len(symbols) > 1:
39
+ return True
40
+ else:
41
+ symbols = symbols[0]
42
+
43
+ if isinstance(symbols, otp.Source):
44
+ return True
45
+ if isinstance(symbols, otq.Symbol):
46
+ symbols = symbols.name
47
+ if isinstance(symbols, str) and 'eval' in symbols:
48
+ return True
49
+ return False
50
+
51
+
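A short illustration of the rule above: passing more than one symbol makes otp.run() return a dictionary keyed by symbol name instead of a single dataframe. This is only a sketch; 'SOME_DB', tick type 'TT' and the symbol names are illustrative (the same database appears in the to_symbol_param() example further down this file).

    data = otp.DataSource('SOME_DB', tick_type='TT')
    result = otp.run(data, symbols=['S1', 'S2'])  # more than one symbol, so the result is a dict
    df_s1 = result['S1']                          # dataframe for symbol 'S1'
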
52
+ class MetaFields:
53
+ """
54
+ OneTick defines several pseudo-columns that can be treated as if they were columns of every tick.
55
+
56
+ These columns can be accessed directly via :py:meth:`onetick.py.Source.__getitem__` method.
57
+
58
+ But in case they are used in :py:class:`~onetick.py.core.column_operations.base.Expr`
59
+ they can be accessed via ``onetick.py.Source.meta_fields``.
60
+
61
+ Examples
62
+ --------
63
+
64
+ Accessing pseudo-fields as columns or as class properties
65
+
66
+ >>> data = otp.Tick(A=1)
67
+ >>> data['X'] = data['_START_TIME']
68
+ >>> data['Y'] = otp.Source.meta_fields['_TIMEZONE']
69
+ >>> otp.run(data, start=otp.dt(2003, 12, 2), timezone='GMT')
70
+ Time A X Y
71
+ 0 2003-12-02 1 2003-12-02 GMT
72
+ """
73
+ def __init__(self):
74
+ self.timestamp = _Column('TIMESTAMP', dtype=ott.nsectime)
75
+ self.time = self.timestamp
76
+ self.start_time = _Column('_START_TIME', dtype=ott.nsectime)
77
+ self.start = self.start_time
78
+ self.end_time = _Column('_END_TIME', dtype=ott.nsectime)
79
+ self.end = self.end_time
80
+ self.timezone = _Column('_TIMEZONE', dtype=str)
81
+ self.db_name = _Column('_DBNAME', dtype=str)
82
+ self.symbol_name = _Column('_SYMBOL_NAME', dtype=str)
83
+ self.tick_type = _Column('_TICK_TYPE', dtype=str)
84
+ self.symbol_time = _Column('_SYMBOL_TIME', dtype=otp.nsectime)
85
+ self.__fields = set(map(str, self.__dict__.values())) | {'Time'}
86
+
87
+ def get_onetick_fields_and_types(self):
88
+ return {
89
+ column.name: column.dtype
90
+ for name, column in self.__dict__.items()
91
+ if not name.startswith('_') and name != 'time'
92
+ }
93
+
94
+ def __iter__(self):
95
+ yield from self.__fields
96
+
97
+ def __contains__(self, item):
98
+ return item in self.__fields
99
+
100
+ def __len__(self):
101
+ return len(self.__fields)
102
+
103
+ def __getitem__(self, item):
104
+ """
105
+ These fields are available:
106
+
107
+ * ``TIMESTAMP`` (or ``Time``)
108
+ * ``START_TIME`` (or ``_START_TIME``)
109
+ * ``END_TIME`` (or ``_END_TIME``)
110
+ * ``TIMEZONE`` (or ``_TIMEZONE``)
111
+ * ``DBNAME`` (or ``_DBNAME``)
112
+ * ``SYMBOL_NAME`` (or ``_SYMBOL_NAME``)
113
+ * ``TICK_TYPE`` (or ``_TICK_TYPE``)
114
+ * ``SYMBOL_TIME`` (or ``_SYMBOL_TIME``)
115
+ """
116
+ return {
117
+ 'TIMESTAMP': self.timestamp,
118
+ 'Time': self.time,
119
+ 'START_TIME': self.start_time,
120
+ '_START_TIME': self.start_time,
121
+ 'END_TIME': self.end_time,
122
+ '_END_TIME': self.end_time,
123
+ 'TIMEZONE': self.timezone,
124
+ '_TIMEZONE': self.timezone,
125
+ 'DB_NAME': self.db_name,
126
+ 'DBNAME': self.db_name,
127
+ '_DBNAME': self.db_name,
128
+ 'SYMBOL_NAME': self.symbol_name,
129
+ '_SYMBOL_NAME': self.symbol_name,
130
+ 'TICK_TYPE': self.tick_type,
131
+ '_TICK_TYPE': self.tick_type,
132
+ 'SYMBOL_TIME': self.symbol_time,
133
+ '_SYMBOL_TIME': self.symbol_time,
134
+ }[item]
135
+
136
+
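A small sketch of the lookup above: the same pseudo-column can be fetched with or without the leading underscore, and assigning it to a field works like the class-level example. The field names 'DB' and 'SYM' are arbitrary.

    data = otp.Tick(A=1)
    data['DB'] = otp.Source.meta_fields['_DBNAME']        # same column object as meta_fields['DBNAME']
    data['SYM'] = otp.Source.meta_fields['SYMBOL_NAME']   # leading underscore is optional
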
137
+ class Source:
138
+ """
139
+ Base class for representing a OneTick execution graph.
140
+ All :ref:`onetick-py sources <api/sources/root:sources>` are derived from this class
141
+ and have access to all its methods.
142
+
143
+ Examples
144
+ --------
145
+ >>> data = otp.Tick(A=1)
146
+ >>> isinstance(data, otp.Source)
147
+ True
148
+
149
+ Also this class can be used to initialize a raw source
150
+ with the help of ``onetick.query`` classes, but
151
+ it should be done with caution as the user is required to set
152
+ such properties as symbol name and tick type manually.
153
+
154
+ >>> data = otp.Source(otq.TickGenerator(bucket_interval=0, fields='long A = 123').tick_type('TT'))
155
+ >>> otp.run(data, symbols='LOCAL::')
156
+ Time A
157
+ 0 2003-12-04 123
158
+ """
159
+
160
+ # TODO: need to support transactions for every _source
161
+ # a transaction is a set of calls between _source creation and a call, or between two calls
162
+ # if transactions have the same operations, then it seems we should add only one set of operations
163
+
164
+ _PROPERTIES = [
165
+ "__node",
166
+ "__hash",
167
+ "__sources_keys_dates",
168
+ "__sources_modify_query_times",
169
+ "__sources_base_ep_func",
170
+ "__sources_symbols",
171
+ "__source_has_output",
172
+ "__name",
173
+ "_tmp_otq"
174
+ ]
175
+ _OT_META_FIELDS = ["_START_TIME", "_END_TIME", "_SYMBOL_NAME", "_DBNAME", "_TICK_TYPE", '_TIMEZONE']
176
+ meta_fields = MetaFields()
177
+ Symbol = Symbol # NOSONAR
178
+
179
+ def __init__(
180
+ self,
181
+ node=None,
182
+ schema=None,
183
+ _symbols=None,
184
+ _start=adaptive,
185
+ _end=adaptive,
186
+ _base_ep_func=None,
187
+ _has_output=True,
188
+ **kwargs,
189
+ ):
190
+
191
+ self._tmp_otq = TmpOtq()
192
+ self.__name = None
193
+
194
+ if isinstance(_symbols, OnetickParameter):
195
+ _symbols = _symbols.parameter_expression
196
+
197
+ schema = self._select_schema(schema, kwargs)
198
+
199
+ for key in schema:
200
+ if self._check_key_in_properties(key):
201
+ raise ValueError(f"Can't set class property {key}")
202
+ if self._check_key_is_meta(key):
203
+ if key == 'TIMESTAMP':
204
+ # for backward-compatibility
205
+ warnings.warn(f"Setting meta field {key} in schema is not needed", FutureWarning, stacklevel=2)
206
+ else:
207
+ raise ValueError(f"Can't set meta field {key}")
208
+
209
+ schema.update(
210
+ self.meta_fields.get_onetick_fields_and_types()
211
+ )
212
+
213
+ for key, value in schema.items():
214
+ # calculate value type
215
+ value_type = ott.get_source_base_type(value)
216
+ self.__dict__[key] = _Column(name=key, dtype=value_type, obj_ref=self)
217
+
218
+ # just an alias to Timestamp
219
+ self.__dict__['Time'] = self.__dict__['TIMESTAMP']
220
+ self.__dict__['_state_vars'] = StateVars(self)
221
+
222
+ if node is None:
223
+ node = otq.Passthrough()
224
+
225
+ if isinstance(_symbols, _SymbolParamColumn):
226
+ symbol_node = otq.ModifySymbolName(str(self.Symbol[_symbols.name, str]))
227
+ node = node.sink(symbol_node)
228
+ _symbols = None
229
+
230
+ self.__hash = uuid.uuid4()
231
+ self.__sources_keys_dates = {}
232
+ self.__sources_modify_query_times = {}
233
+ self.__sources_base_ep_func = {}
234
+ self.__sources_symbols = {}
235
+ self.__source_has_output = _has_output
236
+
237
+ if isinstance(node, _ProxyNode):
238
+ self.__node = _ProxyNode(*node.copy_graph(), refresh_func=self.__refresh_hash)
239
+ else:
240
+ self.__node = _ProxyNode(*self.__from_ep_to_proxy(node), refresh_func=self.__refresh_hash)
241
+ self.__sources_keys_dates[self.__node.key()] = (_start, _end)
242
+ self.__sources_modify_query_times[self.__node.key()] = False
243
+ self.__sources_base_ep_func[self.__node.key()] = _base_ep_func
244
+ self.__sources_symbols[self.__node.key()] = _symbols
245
+
246
+ def _try_default_constructor(self, *args, node=None, schema=None, **kwargs):
247
+ if node is not None:
248
+ # Source.copy() method will use this way
249
+ # all info from original source will be copied by copy() method after
250
+ Source.__init__(self, *args, node=node, schema=schema, **kwargs)
251
+ return True
252
+ return False
253
+
254
+ def base_ep(self, **_kwargs):
255
+ # default implementation
256
+ # real implementation should return a Source object
257
+ return None
258
+
259
+ def _select_schema(self, schema, kwargs) -> dict:
260
+ """
261
+ Selects schema definition from ``schema`` or ``kwargs`` parameters
262
+ for the time of deprecation of ``kwargs`` parameter.
263
+ """
264
+ if schema is None:
265
+ if kwargs:
266
+ warnings.warn(
267
+ f'Setting `{self.__class__.__name__}` schema via `**kwargs` is deprecated. '
268
+ 'Please use `schema` parameter for this. '
269
+ f'Passed kwargs are: {kwargs}.',
270
+ FutureWarning,
271
+ stacklevel=2,
272
+ )
273
+ return kwargs
274
+ else:
275
+ return {}
276
+ elif kwargs:
277
+ raise ValueError(
278
+ "Specifying schema through both `**kwargs` and `schema` is prohibited. "
279
+ f"Passed kwargs are: {kwargs}."
280
+ )
281
+
282
+ return schema.copy()
283
+
284
+ def _clean_sources_dates(self):
285
+ self.__sources_keys_dates = {}
286
+ self.__sources_modify_query_times = {}
287
+ self.__sources_base_ep_func = {}
288
+ self.__sources_symbols = {}
289
+
290
+ def _set_sources_dates(self, other, copy_symbols=True):
291
+ self.__sources_keys_dates.update(other._get_sources_dates())
292
+ self.__sources_modify_query_times.update(other._get_sources_modify_query_times())
293
+ self.__sources_base_ep_func.update(other._get_sources_base_ep_func())
294
+ if copy_symbols:
295
+ self.__sources_symbols.update(other._get_sources_symbols())
296
+ else:
297
+ # this branch is applicable for the bound symbols with callbacks,
298
+ # where we drop all adaptive symbols and keep only manually specified
299
+ # symbols
300
+ manually_bound = {
301
+ key: _ManuallyBoundValue(value)
302
+ for key, value in other._get_sources_symbols().items()
303
+ if value is not adaptive and value is not adaptive_to_default
304
+ }
305
+ self.__sources_symbols.update(manually_bound)
306
+
307
+ self.__source_has_output = other._get_source_has_output()
308
+
309
+ def _change_sources_keys(self, keys: dict):
310
+ """
311
+ Change keys in sources dictionaries.
312
+ Need to do it, for example, after rebuilding the node history with new keys.
313
+
314
+ Parameters
315
+ ----------
316
+ keys: dict
317
+ Mapping from old key to new key
318
+ """
319
+ sources = (self.__sources_keys_dates,
320
+ self.__sources_modify_query_times,
321
+ self.__sources_base_ep_func,
322
+ self.__sources_symbols)
323
+ for dictionary in sources:
324
+ for key in list(dictionary):
325
+ dictionary[keys[key]] = dictionary.pop(key)
326
+
327
+ def _get_source_has_output(self):
328
+ return self.__source_has_output
329
+
330
+ def _get_sources_dates(self):
331
+ return self.__sources_keys_dates
332
+
333
+ def _get_sources_modify_query_times(self):
334
+ return self.__sources_modify_query_times
335
+
336
+ def _get_sources_base_ep_func(self):
337
+ return self.__sources_base_ep_func
338
+
339
+ def _get_sources_symbols(self):
340
+ return self.__sources_symbols
341
+
342
+ def _check_key_in_properties(self, key: str) -> bool:
343
+ if key in self.__class__._PROPERTIES:
344
+ return True
345
+ if key.replace('_' + Source.__name__.lstrip('_'), "") in self.__class__._PROPERTIES:
346
+ return True
347
+ if key.replace(self.__class__.__name__, "") in self.__class__._PROPERTIES:
348
+ return True
349
+ return False
350
+
351
+ def _check_key_is_meta(self, key: str) -> bool:
352
+ return key in self.__class__.meta_fields
353
+
354
+ def _check_key_is_reserved(self, key: str) -> bool:
355
+ return self._check_key_in_properties(key) or self._check_key_is_meta(key)
356
+
357
+ def _set_field_by_tuple(self, name, dtype):
358
+ warnings.warn('Using _set_field_by_tuple() is not recommended,'
359
+ ' change your code to use otp.Source.schema object.', DeprecationWarning)
360
+ if name not in self.__dict__:
361
+ if dtype is None:
362
+ raise KeyError(f'Column name {name} is not in the schema. Please, check that this column '
363
+ 'is in the schema or add it using the .schema property')
364
+
365
+ if name == 0 or name == 1:
366
+ raise ValueError(f"constant {name} are not supported for indexing for now, please use otp.Empty")
367
+ if type(name) in (int, float):
368
+ raise ValueError("integer indexes are not supported")
369
+ self.__dict__[name] = _Column(name, dtype, self)
370
+ else:
371
+ if not isinstance(self.__dict__[name], _Column):
372
+ raise AttributeError(f"There is no '{name}' column")
373
+
374
+ if dtype:
375
+ type1, type2 = self.__dict__[name].dtype, dtype
376
+ b_type1, b_type2 = ott.get_base_type(type1), ott.get_base_type(type2)
377
+
378
+ if b_type1 != b_type2:
379
+ if {type1, type2} == {int, float}:
380
+ self.__dict__[name]._dtype = float
381
+ else:
382
+ raise Warning(
383
+ f"Column '{name}' was declared as '{type1}', but you want to change it to '{type2}', "
384
+ "that is not possible without setting type directly via assigning value"
385
+ )
386
+
387
+ else:
388
+ if issubclass(b_type1, str):
389
+ t1_length = ott.string.DEFAULT_LENGTH if type1 is str else type1.length
390
+ t2_length = ott.string.DEFAULT_LENGTH if type2 is str else type2.length
391
+
392
+ self.__dict__[name]._dtype = type2 if t1_length < t2_length else type1
393
+ if {type1, type2} == {ott.nsectime, ott.msectime}:
394
+ self.__dict__[name]._dtype = ott.nsectime
395
+ return self.__dict__[name]
396
+
397
+ def _hash(self):
398
+ return self.__hash
399
+
400
+ def _merge_tmp_otq(self, source):
401
+ self._tmp_otq.merge(source._tmp_otq)
402
+
403
+ def __prepare_graph(self, symbols=None, start=None, end=None, has_output=False):
404
+ # We copy object here, because we will change it according to passed
405
+ # symbols and date ranges. For example, we can add modify_query_times EP
406
+ # if it is necessary
407
+
408
+ obj = self.copy()
409
+ if has_output:
410
+ obj.sink(otq.Passthrough())
411
+ start, end, symbols = obj._set_date_range_and_symbols(symbols, start, end)
412
+ if start is adaptive:
413
+ start = None
414
+ if end is adaptive:
415
+ end = None
416
+ if symbols is not None and isinstance(symbols, pd.DataFrame):
417
+ symbols = utils.get_symbol_list_from_df(symbols)
418
+ if symbols is not None and not isinstance(symbols, list):
419
+ symbols = [symbols]
420
+ elif symbols is None:
421
+ symbols = []
422
+ _symbols = []
423
+ for sym in symbols:
424
+ _symbols.append(self._convert_symbol_to_string(sym, tmp_otq=obj._tmp_otq, start=start, end=end))
425
+
426
+ return obj, start, end, _symbols
427
+
428
+ def to_otq(self, file_name=None, file_suffix=None, query_name=None, symbols=None, start=None, end=None,
429
+ timezone=None, raw=None, add_passthrough=True,
430
+ running=False,
431
+ start_time_expression=None,
432
+ end_time_expression=None,
433
+ symbol_date=None):
434
+ """
435
+ Save data source to .otq file and return path to the saved file.
436
+
437
+ Parameters
438
+ ----------
439
+ file_name: str
440
+ Absolute or relative path to the saved file.
441
+ If ``None``, create temporary file and name it randomly.
442
+ file_suffix: str
443
+ Suffix to add to the saved file name (including extension).
444
+ Can be specified if ``file_name`` is ``None``
445
+ to distinguish between different temporary files.
446
+ Default: ".to_otq.otq"
447
+ query_name: str
448
+ Name of the main query in the created file.
449
+ If ``None``, take name from this Source object.
450
+ If that name is empty, set name to "query".
451
+ symbols: str, list, :pandas:`DataFrame <pandas.DataFrame>`, :class:`Source`
452
+ symbols to save query with
453
+ start: :py:class:`otp.datetime <onetick.py.datetime>`
454
+ start time to save query with
455
+ end: :py:class:`otp.datetime <onetick.py.datetime>`
456
+ end time to save query with
457
+ timezone: str
458
+ timezone to save query with
459
+ raw
460
+
461
+ .. deprecated:: 1.4.17
462
+
463
+ add_passthrough: bool
464
+ will add :py:class:`onetick.query.Passthrough` event processor at the end of the resulting graph
465
+ running: bool
466
+ Indicates whether a query is CEP or not.
467
+ start_time_expression: str, :py:class:`~onetick.py.Operation`, optional
468
+ Start time onetick expression of the query. If specified, it will take precedence over ``start``.
469
+ end_time_expression: str, :py:class:`~onetick.py.Operation`, optional
470
+ End time onetick expression of the query. If specified, it will take precedence over ``end``.
471
+ symbol_date: :py:class:`otp.datetime <onetick.py.datetime>` or :py:class:`datetime.datetime` or int
472
+ Symbol date for the query or integer in the YYYYMMDD format.
473
+ Will be applied only to the main query.
474
+
475
+ Returns
476
+ -------
477
+
478
+ result: str
479
+ Relative (if ``file_name`` is relative) or absolute path to the created query
480
+ in the format ``file_name::query_name``
481
+ """
482
+ if raw is not None:
483
+ warnings.warn('The "raw" flag is deprecated and makes no effect', FutureWarning)
484
+
485
+ if timezone is None:
486
+ timezone = configuration.config.tz
487
+
488
+ file_path = str(file_name) if file_name is not None else None
489
+ if file_suffix is None:
490
+ file_suffix = self._name_suffix('to_otq.otq')
491
+
492
+ if query_name is None:
493
+ query_name = self.get_name(remove_invalid_symbols=True)
494
+ if query_name is None:
495
+ query_name = 'query'
496
+
497
+ if isinstance(start, _Operation) and start_time_expression is None:
498
+ start_time_expression = str(start)
499
+ start = utils.adaptive
500
+ if isinstance(end, _Operation) and end_time_expression is None:
501
+ end_time_expression = str(end)
502
+ end = utils.adaptive
503
+
504
+ if isinstance(start_time_expression, _Operation):
505
+ start_time_expression = str(start_time_expression)
506
+ if isinstance(end_time_expression, _Operation):
507
+ end_time_expression = str(end_time_expression)
508
+
509
+ obj, start, end, symbols = self.__prepare_graph(symbols, start, end)
510
+
511
+ graph = obj._to_graph(add_passthrough=add_passthrough)
512
+ graph.set_symbols(symbols)
513
+
514
+ return obj._tmp_otq.save_to_file(query=graph, query_name=query_name, file_path=file_path,
515
+ file_suffix=file_suffix, start=start, end=end, timezone=timezone,
516
+ running_query_flag=running,
517
+ start_time_expression=start_time_expression,
518
+ end_time_expression=end_time_expression,
519
+ symbol_date=symbol_date)
520
+
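A minimal sketch of the method above: the return value is the path to the generated .otq file plus the query name. The temporary directory in the comment is illustrative and machine-specific, matching the set_name() examples later in this file.

    data = otp.Tick(A=1)
    data.set_name('main')
    path = data.to_otq()  # e.g. '<tmp dir>/<random name>.main.to_otq.otq::main'
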
521
+ def _store_in_tmp_otq(self, tmp_otq, operation_suffix="tmp_query", symbols=None, start=None, end=None,
522
+ raw=None, add_passthrough=True, name=None, timezone=None, symbol_date=None):
523
+ """
524
+ Adds this source to the tmp_otq storage
525
+
526
+ Parameters
527
+ ----------
528
+ tmp_otq: TmpOtq
529
+ Storage object
530
+ operation_suffix: str
531
+ Suffix string to be added to the autogenerated graph name in the otq file
532
+ name: str, optional
533
+ If specified, this ``name`` will be used to save query
534
+ and the ``operation_suffix`` parameter will be ignored.
535
+
536
+ Returns
537
+ -------
538
+ result: str
539
+ String with the name of the saved graph (starting with THIS::)
540
+ """
541
+ if raw is not None:
542
+ warnings.warn('The "raw" flag is deprecated and makes no effect', FutureWarning)
543
+
544
+ obj, start, end, symbols = self.__prepare_graph(symbols, start, end)
545
+ tmp_otq.merge(obj._tmp_otq)
546
+
547
+ if isinstance(start, ott.dt): # OT save_to_file checks for the datetime time
548
+ start = datetime.fromtimestamp(start.timestamp())
549
+ elif isinstance(start, date):
550
+ start = datetime(start.year, start.month, start.day)
551
+ if isinstance(end, ott.dt):
552
+ end = datetime.fromtimestamp(end.timestamp())
553
+ elif isinstance(end, date):
554
+ end = datetime(end.year, end.month, end.day)
555
+
556
+ if timezone is None:
557
+ timezone = configuration.config.tz
558
+
559
+ graph = obj._to_graph(add_passthrough=add_passthrough)
560
+ graph.set_start_time(start)
561
+ graph.set_end_time(end)
562
+ graph.set_symbols(symbols)
563
+ if timezone is not None:
564
+ if otq.webapi:
565
+ graph.set_timezone(timezone)
566
+ else:
567
+ graph.time_interval_properties().set_timezone(timezone)
568
+
569
+ params = {'symbol_date': symbol_date} if symbol_date is not None else {}
570
+ suffix = self._name_suffix(suffix=operation_suffix, separator='__', remove_invalid_symbols=True)
571
+ return tmp_otq.add_query(graph, suffix=suffix, name=name, params=params)
572
+
573
+ def __refresh_hash(self):
574
+ """
575
+ This internal function refreshes hash for every graph modification.
576
+ It is used only in _ProxyNode, because it tracks nodes changes
577
+ """
578
+ self.__hash = uuid.uuid3(uuid.NAMESPACE_DNS, str(self.__hash))
579
+
580
+ def _prepare_for_execution(self, symbols=None, start=None, end=None, start_time_expression=None,
581
+ end_time_expression=None, timezone=None, has_output=None,
582
+ running_query_flag=None, require_dict=False, node_name=None,
583
+ symbol_date=None):
584
+ if has_output is None:
585
+ has_output = self.__source_has_output
586
+
587
+ if timezone is None:
588
+ timezone = configuration.config.tz
589
+
590
+ obj, start, end, symbols = self.__prepare_graph(symbols, start, end, has_output)
591
+ require_dict = require_dict or _is_dict_required(symbols)
592
+
593
+ if node_name is None:
594
+ node_name = 'SOURCE_CALL_MAIN_OUT_NODE'
595
+ obj.node().node_name(node_name)
596
+
597
+ graph = obj._to_graph(add_passthrough=False)
598
+
599
+ graph.set_symbols(symbols)
600
+
601
+ # create name and suffix for generated .otq file
602
+ if otp.config.main_query_generated_filename:
603
+ name = otp.config.main_query_generated_filename
604
+ if name.endswith('.otq'):
605
+ suffix = ''
606
+ else:
607
+ suffix = '.otq'
608
+ force = True
609
+ else:
610
+ name = ''
611
+ suffix = self._name_suffix('run.otq')
612
+ force = False
613
+
614
+ clean_up = default
615
+ if otp.config.otq_debug_mode:
616
+ clean_up = False
617
+ base_dir = None
618
+ if os.getenv('OTP_WEBAPI_TEST_MODE'):
619
+ from onetick.py.otq import _tmp_otq_path
620
+ base_dir = _tmp_otq_path()
621
+ tmp_file = utils.TmpFile(name=name,
622
+ suffix=suffix,
623
+ force=force,
624
+ base_dir=base_dir,
625
+ clean_up=clean_up)
626
+
627
+ query_to_run = obj._tmp_otq.save_to_file(query=graph,
628
+ query_name=self.get_name(remove_invalid_symbols=True)
629
+ if self.get_name(remove_invalid_symbols=True) else "main_query",
630
+ file_path=tmp_file.path,
631
+ start=start, end=end,
632
+ running_query_flag=running_query_flag,
633
+ start_time_expression=start_time_expression,
634
+ end_time_expression=end_time_expression,
635
+ timezone=timezone,
636
+ symbol_date=symbol_date)
637
+
638
+ return query_to_run, require_dict, node_name
639
+
640
+ def __call__(self, *args, **kwargs):
641
+ """
642
+ .. deprecated:: 1.48.3
643
+ Use :py:func:`otp.run <onetick.py.run>` instead.
644
+ """
645
+ warnings.warn('__call__() method is deprecated, use otp.run() instead', FutureWarning, stacklevel=2)
646
+ return otp.run(self, *args, **kwargs)
647
+
648
+ def to_df(self, symbols=None, **kwargs):
649
+ """
650
+ .. deprecated:: 1.48.3
651
+ Use :py:func:`otp.run <onetick.py.run>` instead.
652
+ """
653
+ warnings.warn('to_df() method is deprecated, use otp.run() instead', FutureWarning, stacklevel=2)
654
+ # For backward compatibility: otp.run() does not accept "symbols" as a non-keyword argument
655
+ if symbols is not None:
656
+ kwargs['symbols'] = symbols
657
+ return otp.run(self, **kwargs)
658
+
659
+ to_dataframe = to_df
660
+
661
+ def print_api_graph(self):
662
+ self.node().copy_graph(print_out=True)
663
+
664
+ def _add_table(self, strict=False):
665
+ table = otq.Table(
666
+ fields=",".join(
667
+ ott.type2str(dtype) + " " + name for name, dtype in self.columns(skip_meta_fields=True).items()
668
+ ),
669
+ keep_input_fields=not strict,
670
+ )
671
+ self.sink(table)
672
+
673
+ def _is_unbound_required(self):
674
+ """ Check whether a graph needs unbound symbol or not """
675
+
676
+ for symbol in self.__sources_symbols.values():
677
+ if symbol is adaptive or symbol is adaptive_to_default:
678
+ return True
679
+ return False
680
+
681
+ def _get_widest_time_range(self):
682
+ """
683
+ Get minimum start time and maximum end time.
684
+ If time is not found, None is returned.
685
+ """
686
+ start_times = []
687
+ end_times = []
688
+
689
+ for start, end in self.__sources_keys_dates.values():
690
+ if start is not adaptive:
691
+ start_times.append(start)
692
+ if end is not adaptive:
693
+ end_times.append(end)
694
+
695
+ start = min(start_times) if start_times else None
696
+ end = max(end_times) if end_times else None
697
+ return start, end
698
+
699
+ def __get_common_symbol(self):
700
+ need_to_bind_symbol = False
701
+ common_symbol = None
702
+
703
+ # let's try to understand whether we could use common symbol for all sources
704
+ # or we need to bind symbols instead
705
+ first_symbol = None
706
+
707
+ for symbol in self.__sources_symbols.values():
708
+ if first_symbol is None:
709
+ first_symbol = symbol
710
+
711
+ if isinstance(first_symbol, _ManuallyBoundValue):
712
+ # Mark that we need to bind, but keep common_symbol equal to None.
713
+ # It is applicable for the bound symbols inside the merge with bound
714
+ # symbols, for example.
715
+ need_to_bind_symbol = True
716
+ else:
717
+ common_symbol = symbol
718
+
719
+ continue
720
+
721
+ if symbol and symbol != first_symbol:
722
+ need_to_bind_symbol = True
723
+ common_symbol = None
724
+ break
725
+
726
+ # symbol is specified nowhere - just set unbound to the default one
727
+ if (first_symbol is adaptive or first_symbol is adaptive_to_default) and (
728
+ common_symbol is adaptive or common_symbol is adaptive_to_default
729
+ ):
730
+ common_symbol = configuration.config.default_symbol
731
+
732
+ return common_symbol, need_to_bind_symbol
733
+
734
+ def __get_modify_query_times(self, key, start, end, sources_start, sources_end):
735
+ # determine whether we have to add modify_query_times to a src
736
+
737
+ if self.__sources_modify_query_times[key]:
738
+ return None
739
+
740
+ start_date, end_date = self.__sources_keys_dates[key]
741
+
742
+ if start_date is adaptive and end_date is adaptive:
743
+ return None
744
+
745
+ # if some of the end is specified, then it means
746
+ # we need to check whether it is worth to wrap into the modify_query_times
747
+ if start_date is adaptive:
748
+ if start is None:
749
+ start_date = sources_start
750
+ else:
751
+ start_date = start
752
+
753
+ if end_date is adaptive:
754
+ if end is None:
755
+ end_date = sources_end
756
+ else:
757
+ end_date = end
758
+
759
+ if start_date is adaptive or end_date is adaptive:
760
+ return None
761
+
762
+ # it might happen when either sources_start/end are adaptive
763
+ # or start/end are adaptive
764
+ if (
765
+ (start is None and sources_start is not adaptive and start_date != sources_start)
766
+ or (start is not None and start_date != start)
767
+ or (end is None and sources_end is not adaptive and end_date != sources_end)
768
+ or (end is not None and end_date != end)
769
+ ):
770
+ mqt_format = 'parse_time("%Y-%m-%d %H:%M:%S.%q","{}", _TIMEZONE)'
771
+ mqt_ep = otq.ModifyQueryTimes(
772
+ start_time=mqt_format.format(start_date.strftime("%Y-%m-%d %H:%M:%S.%f")),
773
+ end_time=mqt_format.format(end_date.strftime("%Y-%m-%d %H:%M:%S.%f")),
774
+ output_timestamp="TIMESTAMP",
775
+ )
776
+
777
+ return mqt_ep
778
+
779
+ def _set_date_range_and_symbols(self, symbols=None, start=None, end=None):
780
+ # will modify self
781
+
782
+ if symbols is None:
783
+ common_symbol, need_to_bind_symbol = self.__get_common_symbol()
784
+ else:
785
+ # when unbound symbols passed
786
+ common_symbol = symbols
787
+ need_to_bind_symbol = True # used to check all sources for whether any of them has bound symbols
788
+
789
+ # Find max and min for _source data ranges
790
+ sources_start, sources_end = self._get_widest_time_range()
791
+ sources_start = sources_start or configuration.config.get('default_start_time', adaptive)
792
+ sources_end = sources_end or configuration.config.get('default_end_time', adaptive)
793
+
794
+ for key in self.__sources_keys_dates:
795
+
796
+ # find a function that builds _source
797
+ func = self.__sources_base_ep_func[key]
798
+ if not func:
799
+ continue
800
+
801
+ src = func()
802
+
803
+ mqt_ep = self.__get_modify_query_times(key, start, end, sources_start, sources_end)
804
+ if mqt_ep:
805
+ self.__sources_modify_query_times[key] = True
806
+ src.sink(mqt_ep)
807
+
808
+ if need_to_bind_symbol:
809
+ bound = None
810
+ if key in self.__sources_symbols: # TODO: this is wrong, we need to know about symbols
811
+ # it happens when we do not copy symbols when applying
812
+ # merge with bound symbols.
813
+ # Wrong, in that case merge with bound symbol is
814
+ # indistinguishable from the manually passed None
815
+ # for external queries
816
+ bound = self.__sources_symbols[key]
817
+ if isinstance(bound, _ManuallyBoundValue):
818
+ bound = bound.value
819
+
820
+ if bound and bound is not adaptive and bound is not adaptive_to_default:
821
+ src.__node.symbol(bound)
822
+ else:
823
+ # if key is not in __sources_symbols, then
824
+ # it means that symbol was not specified, and
825
+ # therefore use the unbound symbol
826
+ if common_symbol is None:
827
+ if bound is adaptive_to_default:
828
+ src.__node.symbol(configuration.config.default_symbol)
829
+ # TODO: write test validated this
830
+ # else:
831
+ # raise Exception("One of the branch does not have symbol specified")
832
+
833
+ # --------------------------
834
+ # glue _source with the main graph
835
+ self.node().add_rules(src.node().copy_rules())
836
+ self.source_by_key(src.node().copy_graph(), key)
837
+ self._merge_tmp_otq(src)
838
+
839
+ if start is None:
840
+ start = sources_start
841
+
842
+ if end is None:
843
+ end = sources_end
844
+
845
+ return start, end, common_symbol
846
+
847
+ def _to_graph(self, add_passthrough=True):
848
+ """
849
+ Construct the graph. Only for internal usage.
850
+
851
+ It is private, because it constructs the raw graph assuming that a graph
852
+ is already defined, and might confuse an end user, because by default Source
853
+ is not fully defined; it becomes fully defined only when symbols, start and
854
+ end datetime are specified.
855
+ """
856
+ constructed_obj = self.copy()
857
+
858
+ # we add it for case when the last EP has a pin output
859
+ if add_passthrough:
860
+ constructed_obj.sink(otq.Passthrough())
861
+
862
+ return otq.GraphQuery(constructed_obj.node().get())
863
+
864
+ def to_graph(self, raw=None, symbols=None, start=None, end=None, *, add_passthrough=True):
865
+ """
866
+ Construct an :py:class:`onetick.query.GraphQuery` object.
867
+
868
+ Parameters
869
+ ----------
870
+ raw:
871
+ .. deprecated:: 1.4.17 has no effect
872
+
873
+ symbols:
874
+ symbols or a symbol query to add to the otq.GraphQuery
875
+ start: :py:class:`otp.datetime <onetick.py.datetime>`
876
+ start time of a query
877
+ end: :py:class:`otp.datetime <onetick.py.datetime>`
878
+ end time of a query
879
+ add_passthrough: bool
880
+ add an additional :py:class:`onetick.query.Passthrough` event processor to the end of the resulting graph
881
+
882
+ Returns
883
+ -------
884
+ otq.GraphQuery
885
+
886
+ See Also
887
+ --------
888
+ :meth:`render`
889
+ """
890
+
891
+ if raw is not None:
892
+ warnings.warn('The "raw" flag is deprecated and makes no effect', FutureWarning)
893
+
894
+ _obj, _start, _end, _symbols = self.__prepare_graph(symbols, start, end)
895
+
896
+ if _obj._tmp_otq.queries:
897
+ warnings.warn('Using .to_graph() for a Source object that uses sub-queries! '
898
+ 'This operation is deprecated and is not guaranteed to work as expected. '
899
+ 'Such a Source should be executed using otp.run() or saved to disk using to_otq()',
900
+ FutureWarning)
901
+ _obj.sink(otq.Passthrough().output_pin_name('OUT_FOR_TO_GRAPH'))
902
+ _graph = _obj._to_graph(add_passthrough=False)
903
+ _graph.set_start_time(_start)
904
+ _graph.set_end_time(_end)
905
+ _graph.set_symbols(_symbols)
906
+
907
+ query = _obj._tmp_otq.save_to_file(query=_graph, file_suffix='_to_graph.otq')
908
+ query_path, query_name = query.split('::')
909
+ query_params = get_query_parameter_list(query_path, query_name)
910
+
911
+ source_with_nested_query = otp.Query(otp.query(query,
912
+ **{param: f'${param}' for param in query_params}),
913
+ out_pin='OUT_FOR_TO_GRAPH')
914
+ return source_with_nested_query.to_graph(
915
+ symbols=_symbols, start=_start, end=_end,
916
+ add_passthrough=add_passthrough)
917
+ else:
918
+ return _obj._to_graph(add_passthrough=add_passthrough)
919
+
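A small sketch of the call above, assuming no sub-queries are involved so the deprecation branch is not hit:

    data = otp.Tick(A=1)
    graph = data.to_graph()                       # otq.GraphQuery with a trailing Passthrough
    graph = data.to_graph(add_passthrough=False)  # same graph without the extra Passthrough
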
920
+ def render(self, **kwargs):
921
+ """
922
+ Renders a calculation graph using the ``graphviz`` library.
923
+ Every node is a OneTick query language event processor.
924
+ Nodes in nested queries, first stage queries and eval queries are not shown.
925
+ Can be useful for debugging and in Jupyter to explore the underlying graph.
926
+
927
+ Note that it's required to have :graphviz:`graphviz <>` package installed.
928
+
929
+ Examples
930
+ --------
931
+ >>> data = otp.Tick(X=3)
932
+ >>> data1, data2 = data[(data['X'] > 2)]
933
+ >>> data = otp.merge([data1, data2])
934
+ >>> data.render() # doctest: +SKIP
935
+
936
+ .. graphviz:: ../../static/render_example.dot
937
+ """
938
+ kwargs.setdefault('verbose', True)
939
+ self.to_graph().render(**kwargs)
940
+
941
+ def copy(self, ep=None, columns=None, deep=False) -> 'Source':
942
+ """
943
+ Build an object with copied calculation graph.
944
+
945
+ Every node of the resulting graph has the same id as in the original. It means that
946
+ if the original and copied graphs are later merged or joined together, then all common
947
+ nodes (all that were created before the .copy() call) will be glued.
948
+
949
+ For example, let's imagine that you have the following calculation graph ``G``
950
+
951
+ .. graphviz::
952
+
953
+ digraph {
954
+ rankdir="LR";
955
+ A -> B;
956
+ }
957
+
958
+ where ``A`` is a source and ``B`` is some operation on it.
959
+
960
+ Then we copy it to the ``G'`` and assign a new operation there
961
+
962
+ .. graphviz::
963
+
964
+ digraph {
965
+ rankdir="LR";
966
+ A -> B -> C;
967
+ }
968
+
969
+ After that we decided to merge ``G`` and ``G'``. The resulting calculation graph will be:
970
+
971
+ .. graphviz::
972
+
973
+ digraph {
974
+ rankdir="LR";
975
+ A -> B -> C -> MERGE;
976
+ B -> MERGE;
977
+ }
978
+
979
+ Please use :meth:`Source.deepcopy` if you want to get the following calculation graph after merges and joins
980
+
981
+ .. graphviz::
982
+
983
+ digraph {
984
+ rankdir="LR";
985
+ A -> B -> C -> MERGE;
986
+ "A'" -> "B'" -> "C'" -> MERGE;
987
+ }
988
+
989
+ Returns
990
+ -------
991
+ Source
992
+
993
+ See Also
994
+ --------
995
+ Source.deepcopy
996
+ """
997
+ if columns is None:
998
+ columns = self.columns(skip_meta_fields=True)
999
+
1000
+ if ep:
1001
+ result = self.__class__(node=ep, schema=columns)
1002
+ result.source(self.node().copy_graph())
1003
+ # we need to clean it, because ep is not a _source
1004
+ result._clean_sources_dates()
1005
+ else:
1006
+ result = self.__class__(node=self.node(), schema=columns)
1007
+
1008
+ result.node().add_rules(self.node().copy_rules(deep=deep))
1009
+ result._set_sources_dates(self)
1010
+ if deep:
1011
+ # generating all new uuids for node history and for sources
1012
+ # after they were initialized
1013
+ keys = defaultdict(uuid.uuid4) # type: ignore
1014
+ result.node().rebuild_graph(keys)
1015
+ result._change_sources_keys(keys)
1016
+
1017
+ # add state
1018
+ result._copy_state_vars_from(self)
1019
+
1020
+ result._tmp_otq = self._tmp_otq.copy()
1021
+ result.__name = self.__name #noqa
1022
+
1023
+ result._copy_properties_from(self)
1024
+
1025
+ return result
1026
+
1027
+ def deepcopy(self, ep=None, columns=None) -> 'onetick.py.Source':
1028
+ """
1029
+ Copy the whole graph and change ids for every node.
1030
+ More details can be found in :meth:`Source.copy`.
1031
+
1032
+ See Also
1033
+ --------
1034
+ Source.copy
1035
+ """
1036
+ return self.copy(ep, columns, deep=True)
1037
+
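A sketch contrasting the two methods above, following the graphs drawn in the copy() docstring: merging a shallow copy glues the shared nodes, while merging a deep copy keeps the branches fully separate.

    a = otp.Tick(A=1)

    b = a.copy()
    b['B'] = 2
    glued = otp.merge([a, b])      # nodes created before copy() are shared

    c = a.deepcopy()
    c['C'] = 3
    separate = otp.merge([a, c])   # every node of the deep copy has a new id
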
1038
+ def _copy_properties_from(self, obj):
1039
+ # needed if we are doing copy of a child with custom properties
1040
+ for attr in set(self.__class__._PROPERTIES) - set(Source._PROPERTIES):
1041
+ setattr(self, attr, getattr(obj, attr))
1042
+
1043
+ def _copy_state_vars_from(self, objs):
1044
+ self.__dict__["_state_vars"] = StateVars(self, objs)
1045
+
1046
+ def columns(self, skip_meta_fields=False):
1047
+ """
1048
+ Return columns in data source
1049
+
1050
+ Parameters
1051
+ ----------
1052
+ skip_meta_fields: bool, default=False
1053
+ do not add meta fields
1054
+ Returns
1055
+ -------
1056
+ dict
1057
+ """
1058
+ result = {}
1059
+
1060
+ for key, value in self.__dict__.items():
1061
+ if skip_meta_fields and self._check_key_is_meta(key):
1062
+ continue
1063
+
1064
+ if self._check_key_in_properties(key):
1065
+ continue
1066
+
1067
+ if isinstance(value, _Column):
1068
+ result[value.name] = value.dtype
1069
+
1070
+ return result
1071
+
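A quick sketch of the method above; with skip_meta_fields=True only user-defined fields are reported (the exact mapping mirrors the schema property shown further below).

    data = otp.Tick(A=1, B=0.5)
    data.columns(skip_meta_fields=True)   # roughly {'A': <class 'int'>, 'B': <class 'float'>}
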
1072
+ def drop_columns(self):
1073
+ """
1074
+ This method removes all columns in the python representation, but doesn't
1075
+ drop columns from the data.
1076
+
1077
+ It is used when an external query is applied, because we don't know how
1078
+ the data schema has changed.
1079
+ """
1080
+
1081
+ items = []
1082
+
1083
+ for key, value in self.__dict__.items():
1084
+ if self._check_key_is_reserved(key):
1085
+ continue
1086
+
1087
+ if isinstance(value, _Column):
1088
+ items.append(key)
1089
+
1090
+ for item in items:
1091
+ del self.__dict__[item]
1092
+
1093
+ def node(self):
1094
+ return self.__node
1095
+
1096
+ def tick_type(self, tt):
1097
+ self.__node.tick_type(tt)
1098
+ return self
1099
+
1100
+ def symbol(self, symbol): # NOSONAR
1101
+ """
1102
+ Apply symbol to graph
1103
+
1104
+ .. deprecated:: 1.3.31
1105
+
1106
+ """
1107
+ warnings.warn("symbol method is deprecated, please specify symbol during creation", FutureWarning)
1108
+ self.__node.symbol(symbol)
1109
+ return self
1110
+
1111
+ def node_name(self, name=None, key=None):
1112
+ return self.__node.node_name(name, key)
1113
+
1114
+ def _fix_varstrings(self):
1115
+ """
1116
+ PY-556: converting to varstring results in string with null-characters
1117
+ """
1118
+ varstring_columns = {
1119
+ name: self[name]
1120
+ for name, dtype in self.schema.items()
1121
+ if dtype is ott.varstring
1122
+ }
1123
+ # just updating the column removes null-characters
1124
+ if varstring_columns:
1125
+ self.update(varstring_columns, inplace=True)
1126
+
1127
+ def __from_ep_to_proxy(self, ep):
1128
+ in_pin, out_pin = None, None
1129
+ if isinstance(ep, otq.graph_components.EpBase.PinnedEp):
1130
+ if hasattr(ep, "_output_name"):
1131
+ out_pin = getattr(ep, "_output_name")
1132
+ else:
1133
+ in_pin = getattr(ep, "_input_name")
1134
+
1135
+ ep = getattr(ep, "_ep")
1136
+
1137
+ return ep, uuid.uuid4(), in_pin, out_pin
1138
+
1139
+ def sink(self, ep, out_pin=None, inplace: bool = True):
1140
+ """
1141
+ Appends ``ep`` node to this source (inplace by default).
1142
+ Connects ``out_pin`` of this source to ``ep``.
1143
+
1144
+ Can be used to connect onetick.query objects to :class:`onetick.py.Source`.
1145
+
1146
+ Data schema changes (added or deleted columns) will not be detected automatically
1147
+ after applying this function, so the user must adjust the schema manually
1148
+ by updating :meth:`onetick.py.Source.schema` property.
1149
+
1150
+ Parameters
1151
+ ----------
1152
+ ep: otq.graph_components.EpBase,\
1153
+ otq.graph_components.EpBase.PinnedEp,\
1154
+ Tuple[otq.graph_components.EpBase, uuid.uuid4, Optional[str], Optional[str]]
1155
+ onetick.query EP object to append to source.
1156
+ out_pin: Optional[str], default=None
1157
+ name of the out pin to connect to ``ep``
1158
+ inplace: bool, default=True
1159
+ if `True` method will modify current object,
1160
+ otherwise it will return modified copy of the object.
1161
+
1162
+ Returns
1163
+ ----------
1164
+ :class:`Source` or ``None``
1165
+ Returns ``None`` if ``inplace=True``.
1166
+
1167
+ See Also
1168
+ --------
1169
+ onetick.py.Source.schema
1170
+ onetick.py.core._source.schema.Schema
1171
+
1172
+ Examples
1173
+ --------
1174
+ Adding column 'B' directly with onetick.query EP.
1175
+
1176
+ >>> data = otp.Tick(A=1)
1177
+ >>> data.sink(otq.AddField(field='B', value=2)) # OTdirective: skip-snippet:;
1178
+ >>> otp.run(data) # OTdirective: skip-snippet:;
1179
+ Time A B
1180
+ 0 2003-12-01 1 2
1181
+
1182
+ But we can't use this column with `onetick.py` methods yet:
1183
+
1184
+ >>> data['C'] = data['B'] # OTdirective: skip-snippet:; # doctest: +ELLIPSIS
1185
+ Traceback (most recent call last):
1186
+ ...
1187
+ AttributeError: There is no 'B' column
1188
+
1189
+ We should manually change source's schema:
1190
+
1191
+ >>> data.schema.update(B=int) # OTdirective: skip-snippet:;
1192
+ >>> data['C'] = data['B']
1193
+ >>> otp.run(data)
1194
+ Time A B C
1195
+ 0 2003-12-01 1 2 2
1196
+
1197
+ Use parameter ``inplace=False`` to return modified copy of the source:
1198
+
1199
+ >>> data = otp.Tick(A=1)
1200
+ >>> new_data = data.sink(otq.AddField(field='B', value=2), inplace=False)
1201
+ >>> otp.run(data)
1202
+ Time A
1203
+ 0 2003-12-01 1
1204
+ >>> otp.run(new_data)
1205
+ Time A B
1206
+ 0 2003-12-01 1 2
1207
+ """
1208
+ if not (
1209
+ issubclass(type(ep), otq.graph_components.EpBase)
1210
+ or issubclass(type(ep), otq.graph_components.EpBase.PinnedEp)
1211
+ or type(ep) is tuple
1212
+ ):
1213
+ raise TypeError("sinking is allowed only for EpBase instances")
1214
+
1215
+ if inplace:
1216
+ obj = self
1217
+ else:
1218
+ obj = self.copy()
1219
+
1220
+ if type(ep) is tuple:
1221
+ # for already existed EP fetched from _ProxyNode
1222
+ obj.__node.sink(out_pin, *ep)
1223
+ else:
1224
+ obj.__node.sink(out_pin, *obj.__from_ep_to_proxy(ep))
1225
+
1226
+ if inplace:
1227
+ return None
1228
+ return obj
1229
+
1230
+ def __rshift__(self, ep):
1231
+ """ duplicates sink() method, but returns new object """
1232
+ new_source = self.copy()
1233
+ new_source.sink(ep)
1234
+ return new_source
1235
+
1236
+ def __irshift__(self, ep):
1237
+ """ duplicates sink() method, but assigns source new object """
1238
+ new_source = self.copy()
1239
+ new_source.sink(ep)
1240
+ return new_source
1241
+
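A sketch of the operator form of sink(); as with sink() itself, the schema has to be updated manually afterwards.

    data = otp.Tick(A=1)
    new_data = data >> otq.AddField(field='B', value=2)  # data itself is left unchanged
    new_data.schema.update(B=int)                        # make the new column visible to onetick-py
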
1242
+ def source(self, ep, in_pin=None):
1243
+ """ Add node as source to root node """
1244
+ if not (
1245
+ issubclass(type(ep), otq.graph_components.EpBase)
1246
+ or issubclass(type(ep), otq.graph_components.EpBase.PinnedEp)
1247
+ or type(ep) is tuple
1248
+ ):
1249
+ raise TypeError("sourcing is allowed only for EpBase instances")
1250
+
1251
+ if type(ep) is tuple:
1252
+ # for already existed EP fetched from _ProxyNode
1253
+ return self.__node.source(in_pin, *ep)
1254
+ else:
1255
+ return self.__node.source(in_pin, *self.__from_ep_to_proxy(ep))
1256
+
1257
+ def source_by_key(self, ep, to_key):
1258
+ """ Add node as source to graph node by key"""
1259
+ if not (
1260
+ issubclass(type(ep), otq.graph_components.EpBase)
1261
+ or issubclass(type(ep), otq.graph_components.EpBase.PinnedEp)
1262
+ or type(ep) is tuple
1263
+ ):
1264
+ raise TypeError("sourcing is allowed only for EpBase instances")
1265
+
1266
+ if type(ep) is tuple:
1267
+ # for already existed EP fetched from _ProxyNode
1268
+ return self.__node.source_by_key(to_key, *ep)
1269
+ else:
1270
+ return self.__node.source_by_key(to_key, *self.__from_ep_to_proxy(ep))
1271
+
1272
+ def to_symbol_param(self):
1273
+ """
1274
+ Creates a read-only instance with the same columns except Time.
1275
+ It is used as a result of a first stage query with symbol params.
1276
+
1277
+ See also
1278
+ --------
1279
+ :ref:`static/concepts/symbols:Symbol parameters`
1280
+
1281
+ Examples
1282
+ --------
1283
+ >>> symbols = otp.Ticks({'SYMBOL_NAME': ['S1', 'S2'], 'PARAM': ['A', 'B']})
1284
+ >>> symbol_params = symbols.to_symbol_param()
1285
+ >>> t = otp.DataSource('SOME_DB', tick_type='TT')
1286
+ >>> t['S_PARAM'] = symbol_params['PARAM']
1287
+ >>> result = otp.run(t, symbols=symbols)
1288
+ >>> result['S1']
1289
+ Time X S_PARAM
1290
+ 0 2003-12-01 00:00:00.000 1 A
1291
+ 1 2003-12-01 00:00:00.001 2 A
1292
+ 2 2003-12-01 00:00:00.002 3 A
1293
+ """
1294
+ return _SymbolParamSource(**self.columns())
1295
+
1296
+ @staticmethod
1297
+ def _convert_symbol_to_string(symbol, tmp_otq=None, start=None, end=None, timezone=None, symbol_date=None):
1298
+ if start is adaptive:
1299
+ start = None
1300
+ if end is adaptive:
1301
+ end = None
1302
+
1303
+ if isinstance(symbol, Source):
1304
+ symbol = otp.eval(symbol).to_eval_string(tmp_otq=tmp_otq,
1305
+ start=start, end=end, timezone=timezone,
1306
+ operation_suffix='symbol',
1307
+ query_name=None,
1308
+ file_suffix=symbol._name_suffix('symbol.otq'),
1309
+ symbol_date=symbol_date)
1310
+
1311
+ if isinstance(symbol, otp.query):
1312
+ return symbol.to_eval_string()
1313
+
1314
+ if isinstance(symbol, otq.GraphQuery):
1315
+ params = {'symbol_date': symbol_date} if symbol_date is not None else {}
1316
+ query_name = tmp_otq.add_query(symbol, suffix='__symbol', params=params)
1317
+ return f'eval(THIS::{query_name})'
1318
+
1319
+ return symbol
1320
+
1321
+ @staticmethod
1322
+ def _construct_multi_branch_graph(branches):
1323
+ # TODO: add various checks, e.g. that branches have common parts
1324
+ main = branches[0].copy()
1325
+ for branch in branches[1:]:
1326
+ main.node().add_rules(branch.node().copy_rules())
1327
+ main._merge_tmp_otq(branch)
1328
+ return main
1329
+
1330
+ def _apply_side_branches(self, side_branches):
1331
+ for side_branch in side_branches:
1332
+ self.node().add_rules(side_branch.node().copy_rules())
1333
+ self._merge_tmp_otq(side_branch)
1334
+ self.__sources_keys_dates.update(side_branch.__sources_keys_dates)
1335
+ self.__sources_modify_query_times.update(side_branch.__sources_modify_query_times)
1336
+ self.__sources_base_ep_func.update(side_branch.__sources_base_ep_func)
1337
+ self.__sources_symbols.update(side_branch.__sources_symbols)
1338
+
1339
+ @property
1340
+ def state_vars(self) -> StateVars:
1341
+ """
1342
+ Provides access to state variables
1343
+
1344
+ Returns
1345
+ -------
1346
+ State Variables: Dict[str, state variable]
1347
+ State variables; you can access one by its name.
1348
+
1349
+ See Also
1350
+ --------
1351
+ | `State Variables \
1352
+ <../../static/getting_started/variables_and_data_structures.html#variables-and-data-structures>`_
1353
+ | **DECLARE_STATE_VARIABLES** OneTick event processor
1354
+
1355
+ """
1356
+ return self.__dict__['_state_vars']
1357
+
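A sketch of accessing the property above. The declaration helper lives in onetick/py/state.py (see the file list); otp.state.var here is an assumption based on that module rather than something defined in this file.

    data = otp.Tick(A=1)
    data.state_vars['COUNTER'] = otp.state.var(0)   # assumed declaration helper from onetick.py.state
    data['B'] = data.state_vars['COUNTER']
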
1358
+ # non word characters are not supported
1359
+ __invalid_query_name_symbols_regex = re.compile(r'\W')
1360
+
1361
+ def __remove_invalid_symbols(self, s):
1362
+ """
1363
+ Replaces symbols that cannot be put in query names with '_'
1364
+ """
1365
+ return self.__invalid_query_name_symbols_regex.sub('_', s)
1366
+
1367
+ def get_name(self, remove_invalid_symbols=False) -> Optional[str]:
1368
+ """
1369
+ Returns source name.
1370
+
1371
+ Parameters
1372
+ ----------
1373
+ remove_invalid_symbols: bool
1374
+ If True, all characters not supported in query names in `.otq` file will be replaced,
1375
+ because only alphanumeric, minus and underscore characters are supported in query names.
1376
+
1377
+ See also
1378
+ --------
1379
+ :meth:`set_name`
1380
+ """
1381
+ if remove_invalid_symbols and self.__name:
1382
+ return self.__remove_invalid_symbols(self.__name)
1383
+ else:
1384
+ return self.__name
1385
+
1386
+     def set_name(self, new_name):
+         """
+         Sets the source name.
+         It's an internal onetick-py name of the source that is only used
+         as a part of the resulting .otq file name and as the name of the query inside this file.
+
+         This method doesn't set the name of the OneTick graph node.
+
+         Parameters
+         ----------
+         new_name: str
+             New name of the source.
+
+             Only alphanumeric, minus and underscore characters are supported.
+             All other characters will be replaced in the resulting query name.
+
+         See also
+         --------
+         :meth:`get_name`
+
+         Examples
+         --------
+         >>> t = otp.Tick(A=1)
+
+         By default the source has no name, and predefined values are used when generating the .otq file:
+
+         >>> t.to_otq() # doctest: +SKIP
+         '/tmp/test_user/run_20240126_152546_1391/magnificent-wolverine.to_otq.otq::query'
+
+         The changed name will be used as a part of the resulting .otq file name
+         and as the name of the query inside this file:
+
+         >>> t.set_name('main')
+         >>> t.to_otq() # doctest: +SKIP
+         '/tmp/test_user/run_20240126_152546_1391/dandelion-angelfish.main.to_otq.otq::main'
+         """
+         assert isinstance(new_name, str) or new_name is None, "Source name must be a string or None."
+         if new_name is not None:
+             assert new_name != '', "Source name must be a non-empty string."
+         self.__name = new_name
+
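Since get_name(remove_invalid_symbols=True) applies the \W substitution shown earlier, a name containing spaces or punctuation comes back sanitized. A small illustrative sketch, following directly from the code above:

    t.set_name('my query')
    t.get_name()                             # 'my query'
    t.get_name(remove_invalid_symbols=True)  # 'my_query'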
+     def _name_suffix(self, suffix, separator='.', remove_invalid_symbols=False):
+         if remove_invalid_symbols:
+             suffix = self.__remove_invalid_symbols(suffix)
+             separator = self.__remove_invalid_symbols(separator)
+             name = self.get_name(remove_invalid_symbols=True)
+         else:
+             name = self.__name
+         return f'{separator}{name}{separator}{suffix}' if name else f'{separator}{suffix}'
+
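The helper above only does string formatting, so its output follows directly from the f-string; for example (the values below are what the expression yields, not a documented guarantee):

    # with the source name set to 'main':
    #   _name_suffix('symbol.otq')  -> '.main.symbol.otq'
    # with no name set:
    #   _name_suffix('symbol.otq')  -> '.symbol.otq'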
+     @property
+     def schema(self) -> Schema:
+         """
+         Represents the actual Python data schema in the column-name -> type format.
+         For example, it can be used after :meth:`Source.sink` to adjust
+         the schema.
+
+         Returns
+         -------
+         Schema
+
+         See Also
+         --------
+         Source.sink
+
+         Examples
+         --------
+
+         >>> data = otp.Ticks([['X', 'Y', 'Z'],
+         ...                   [ 1, 0.5, 'abc']])
+         >>> data['T'] = data['Time']
+         >>> data.schema
+         {'X': <class 'int'>, 'Y': <class 'float'>, 'Z': <class 'str'>, 'T': <class 'onetick.py.types.nsectime'>}
+
+         >>> data.schema['X']
+         <class 'int'>
+
+         >>> data.schema['X'] = float
+         >>> data.schema['X']
+         <class 'float'>
+
+         >>> 'W' in data.schema
+         False
+         >>> data.schema['W'] = otp.nsectime
+         >>> 'W' in data.schema
+         True
+         >>> data.schema['W']
+         <class 'onetick.py.types.nsectime'>
+         """
+         schema = self.columns(skip_meta_fields=True)
+         # meta fields will be in the schema, but hidden
+         hidden_columns = {
+             k: v
+             for k, v in self.columns(skip_meta_fields=False).items()
+             if self._check_key_is_meta(k)
+         }
+         if 'TIMESTAMP' in hidden_columns:
+             hidden_columns['Time'] = hidden_columns['TIMESTAMP']
+         return Schema(_base_source=self, _hidden_columns=hidden_columns, **schema)
+
+     def set_schema(self, **kwargs):
+         """
+         Set the schema of the source.
+         Note: this method affects only the Python-side schema and doesn't make any database queries.
+         It is used to set the schema after reading from a database or after a complex query.
+
+         .. deprecated:: 1.14.9
+
+             Please use the :property:`Source.schema` to access and adjust the schema.
+
+         Parameters
+         ----------
+         kwargs
+             schema in the column_name=type format
+
+         Examples
+         --------
+         Python can't track low-level column changes, e.g. when a complex query or a per-tick script
+         is added with :meth:`Source.sink`.
+
+         >>> data = otp.Ticks(dict(A=[1, 2], B=["a", "b"]))
+         >>> data.sink(otq.AddField(field='Z', value='5'))
+         >>> data.columns(skip_meta_fields=True)
+         {'A': <class 'int'>, 'B': <class 'str'>}
+         >>> # OTdirective: snippet-name: Arrange.schema.set;
+         >>> data.set_schema(A=int, B=str, Z=int)
+         >>> data.columns(skip_meta_fields=True)
+         {'A': <class 'int'>, 'B': <class 'str'>, 'Z': <class 'int'>}
+         """
+         self.drop_columns()
+         for name, dtype in kwargs.items():
+             dtype = ott.get_source_base_type(dtype)
+             if self._check_key_is_meta(name):
+                 warnings.warn(f"Setting type in schema for meta field {name}", stacklevel=2)
+             if self._check_key_in_properties(name):
+                 raise ValueError(f"Can't set type in schema for class property {name}")
+             self.__dict__[name] = _Column(name, dtype, self)
+
+     def has_start_end_time(self) -> Tuple[bool, bool]:
+         """
+         Check whether at least one of the query's sources has a start time
+         and whether at least one has an end time.
+         """
+         has_start_time = False
+         has_end_time = False
+
+         for start, end in self._get_sources_dates().values():
+             if not has_start_time and start is not adaptive and start is not None:
+                 has_start_time = True
+
+             if not has_end_time and end is not adaptive and end is not None:
+                 has_end_time = True
+
+         return has_start_time, has_end_time
+
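A short sketch of how the result might be consumed (illustrative only):

    has_start, has_end = src.has_start_end_time()
    # True in a position means at least one of the graph's sources has an explicit
    # (non-adaptive, non-None) start or end time, respectively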
+     from ._source.source_methods.aggregations import ( # type: ignore[misc]
+         agg,
+         high, low, first, last, distinct, high_time, low_time,
+         ob_snapshot, ob_snapshot_wide, ob_snapshot_flat, ob_summary,
+         ob_size, ob_vwap, ob_num_levels,
+         ranking, percentile, find_value_for_percentile,
+         exp_w_average, exp_tw_average, standardized_moment,
+         portfolio_price, multi_portfolio_price, return_ep, implied_vol,
+         linear_regression,
+         process_by_group,
+     )
+     from ._source.source_methods.joins import ( # type: ignore[misc]
+         _process_keep_time_param,
+         _get_columns_with_prefix,
+         join_with_collection,
+         join_with_query,
+         point_in_time,
+         join_with_snapshot,
+     )
+     from ._source.source_methods.times import ( # type: ignore[misc]
+         update_timestamp,
+         modify_query_times,
+         time_interval_shift,
+         time_interval_change,
+     )
+     from ._source.source_methods.fields import ( # type: ignore[misc]
+         _add_field,
+         _update_timestamp,
+         _update_field,
+         __setattr__,
+         __setitem__,
+         add_fields,
+         table,
+         update,
+     )
+     from ._source.source_methods.filters import ( # type: ignore[misc]
+         if_else,
+         where_clause,
+         where,
+         _get_integer_slice,
+         __getitem__,
+         dropna,
+         time_filter,
+         skip_bad_tick,
+         character_present,
+     )
+     from ._source.source_methods.drops import ( # type: ignore[misc]
+         drop,
+         __delitem__,
+     )
+     from ._source.source_methods.writes import ( # type: ignore[misc]
+         write,
+         write_parquet,
+         save_snapshot,
+     )
+     from ._source.source_methods.renames import ( # type: ignore[misc]
+         _add_prefix_and_suffix,
+         add_prefix,
+         add_suffix,
+         rename,
+     )
+     from ._source.source_methods.pandases import ( # type: ignore[misc]
+         plot,
+         count,
+         head,
+         tail,
+     )
+     from ._source.source_methods.sorts import ( # type: ignore[misc]
+         sort_values,
+         sort,
+     )
+     from ._source.source_methods.debugs import ( # type: ignore[misc]
+         dump,
+         throw,
+         logf,
+     )
+     from ._source.source_methods.applyers import ( # type: ignore[misc]
+         apply_query,
+         apply,
+         script,
+     )
+     from ._source.source_methods.symbols import ( # type: ignore[misc]
+         show_symbol_name_in_db,
+         modify_symbol_name,
+     )
+     from ._source.source_methods.columns import ( # type: ignore[misc]
+         mean,
+         unite_columns,
+     )
+     from ._source.source_methods.switches import ( # type: ignore[misc]
+         switch,
+         split,
+     )
+     from ._source.source_methods.misc import ( # type: ignore[misc]
+         pause,
+         insert_tick,
+         transpose,
+         cache,
+         pnl_realized,
+         execute,
+         _columns_names_regex,
+         fillna,
+         mkt_activity,
+         book_diff,
+         limit,
+         virtual_ob,
+         corp_actions,
+     )
+     from ._source.source_methods.merges import ( # type: ignore[misc]
+         __add__,
+         append,
+         diff,
+         lee_and_ready,
+         estimate_ts_delay,
+     )
+     from ._source.source_methods.data_quality import ( # type: ignore[misc]
+         show_data_quality,
+         insert_data_quality_event,
+         intercept_data_quality,
+         show_symbol_errors,
+         intercept_symbol_errors,
+     )
+
+
+ _Source = Source # Backward compatibility
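The import blocks inside the class body above are a code-organization device: functions defined in the source_methods submodules are imported into the class namespace, which binds them as regular methods of Source. A generic, self-contained sketch of the same pattern (module and function names are illustrative, not part of onetick-py):

    # helpers.py
    def describe(self):
        """Becomes a bound method once imported into a class body."""
        return f"{type(self).__name__} with value {self.value}"

    # container.py
    class Container:
        def __init__(self, value):
            self.value = value

        from helpers import describe  # bound as Container.describe

    print(Container(42).describe())  # "Container with value 42"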