onetick-py 1.177.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (152)
  1. locator_parser/__init__.py +0 -0
  2. locator_parser/acl.py +73 -0
  3. locator_parser/actions.py +262 -0
  4. locator_parser/common.py +368 -0
  5. locator_parser/io.py +43 -0
  6. locator_parser/locator.py +150 -0
  7. onetick/__init__.py +101 -0
  8. onetick/doc_utilities/__init__.py +3 -0
  9. onetick/doc_utilities/napoleon.py +40 -0
  10. onetick/doc_utilities/ot_doctest.py +140 -0
  11. onetick/doc_utilities/snippets.py +279 -0
  12. onetick/lib/__init__.py +4 -0
  13. onetick/lib/instance.py +141 -0
  14. onetick/py/__init__.py +293 -0
  15. onetick/py/_stack_info.py +89 -0
  16. onetick/py/_version.py +2 -0
  17. onetick/py/aggregations/__init__.py +11 -0
  18. onetick/py/aggregations/_base.py +648 -0
  19. onetick/py/aggregations/_docs.py +948 -0
  20. onetick/py/aggregations/compute.py +286 -0
  21. onetick/py/aggregations/functions.py +2216 -0
  22. onetick/py/aggregations/generic.py +104 -0
  23. onetick/py/aggregations/high_low.py +80 -0
  24. onetick/py/aggregations/num_distinct.py +83 -0
  25. onetick/py/aggregations/order_book.py +501 -0
  26. onetick/py/aggregations/other.py +1014 -0
  27. onetick/py/backports.py +26 -0
  28. onetick/py/cache.py +374 -0
  29. onetick/py/callback/__init__.py +5 -0
  30. onetick/py/callback/callback.py +276 -0
  31. onetick/py/callback/callbacks.py +131 -0
  32. onetick/py/compatibility.py +798 -0
  33. onetick/py/configuration.py +771 -0
  34. onetick/py/core/__init__.py +0 -0
  35. onetick/py/core/_csv_inspector.py +93 -0
  36. onetick/py/core/_internal/__init__.py +0 -0
  37. onetick/py/core/_internal/_manually_bound_value.py +6 -0
  38. onetick/py/core/_internal/_nodes_history.py +250 -0
  39. onetick/py/core/_internal/_op_utils/__init__.py +0 -0
  40. onetick/py/core/_internal/_op_utils/every_operand.py +9 -0
  41. onetick/py/core/_internal/_op_utils/is_const.py +10 -0
  42. onetick/py/core/_internal/_per_tick_scripts/tick_list_sort_template.script +121 -0
  43. onetick/py/core/_internal/_proxy_node.py +140 -0
  44. onetick/py/core/_internal/_state_objects.py +2312 -0
  45. onetick/py/core/_internal/_state_vars.py +93 -0
  46. onetick/py/core/_source/__init__.py +0 -0
  47. onetick/py/core/_source/_symbol_param.py +95 -0
  48. onetick/py/core/_source/schema.py +97 -0
  49. onetick/py/core/_source/source_methods/__init__.py +0 -0
  50. onetick/py/core/_source/source_methods/aggregations.py +809 -0
  51. onetick/py/core/_source/source_methods/applyers.py +296 -0
  52. onetick/py/core/_source/source_methods/columns.py +141 -0
  53. onetick/py/core/_source/source_methods/data_quality.py +301 -0
  54. onetick/py/core/_source/source_methods/debugs.py +272 -0
  55. onetick/py/core/_source/source_methods/drops.py +120 -0
  56. onetick/py/core/_source/source_methods/fields.py +619 -0
  57. onetick/py/core/_source/source_methods/filters.py +1002 -0
  58. onetick/py/core/_source/source_methods/joins.py +1413 -0
  59. onetick/py/core/_source/source_methods/merges.py +605 -0
  60. onetick/py/core/_source/source_methods/misc.py +1455 -0
  61. onetick/py/core/_source/source_methods/pandases.py +155 -0
  62. onetick/py/core/_source/source_methods/renames.py +356 -0
  63. onetick/py/core/_source/source_methods/sorts.py +183 -0
  64. onetick/py/core/_source/source_methods/switches.py +142 -0
  65. onetick/py/core/_source/source_methods/symbols.py +117 -0
  66. onetick/py/core/_source/source_methods/times.py +627 -0
  67. onetick/py/core/_source/source_methods/writes.py +986 -0
  68. onetick/py/core/_source/symbol.py +205 -0
  69. onetick/py/core/_source/tmp_otq.py +222 -0
  70. onetick/py/core/column.py +209 -0
  71. onetick/py/core/column_operations/__init__.py +0 -0
  72. onetick/py/core/column_operations/_methods/__init__.py +4 -0
  73. onetick/py/core/column_operations/_methods/_internal.py +28 -0
  74. onetick/py/core/column_operations/_methods/conversions.py +216 -0
  75. onetick/py/core/column_operations/_methods/methods.py +292 -0
  76. onetick/py/core/column_operations/_methods/op_types.py +160 -0
  77. onetick/py/core/column_operations/accessors/__init__.py +0 -0
  78. onetick/py/core/column_operations/accessors/_accessor.py +28 -0
  79. onetick/py/core/column_operations/accessors/decimal_accessor.py +104 -0
  80. onetick/py/core/column_operations/accessors/dt_accessor.py +537 -0
  81. onetick/py/core/column_operations/accessors/float_accessor.py +184 -0
  82. onetick/py/core/column_operations/accessors/str_accessor.py +1367 -0
  83. onetick/py/core/column_operations/base.py +1121 -0
  84. onetick/py/core/cut_builder.py +150 -0
  85. onetick/py/core/db_constants.py +20 -0
  86. onetick/py/core/eval_query.py +245 -0
  87. onetick/py/core/lambda_object.py +441 -0
  88. onetick/py/core/multi_output_source.py +232 -0
  89. onetick/py/core/per_tick_script.py +2256 -0
  90. onetick/py/core/query_inspector.py +464 -0
  91. onetick/py/core/source.py +1744 -0
  92. onetick/py/db/__init__.py +2 -0
  93. onetick/py/db/_inspection.py +1128 -0
  94. onetick/py/db/db.py +1327 -0
  95. onetick/py/db/utils.py +64 -0
  96. onetick/py/docs/__init__.py +0 -0
  97. onetick/py/docs/docstring_parser.py +112 -0
  98. onetick/py/docs/utils.py +81 -0
  99. onetick/py/functions.py +2398 -0
  100. onetick/py/license.py +190 -0
  101. onetick/py/log.py +88 -0
  102. onetick/py/math.py +935 -0
  103. onetick/py/misc.py +470 -0
  104. onetick/py/oqd/__init__.py +22 -0
  105. onetick/py/oqd/eps.py +1195 -0
  106. onetick/py/oqd/sources.py +325 -0
  107. onetick/py/otq.py +216 -0
  108. onetick/py/pyomd_mock.py +47 -0
  109. onetick/py/run.py +916 -0
  110. onetick/py/servers.py +173 -0
  111. onetick/py/session.py +1347 -0
  112. onetick/py/sources/__init__.py +19 -0
  113. onetick/py/sources/cache.py +167 -0
  114. onetick/py/sources/common.py +128 -0
  115. onetick/py/sources/csv.py +642 -0
  116. onetick/py/sources/custom.py +85 -0
  117. onetick/py/sources/data_file.py +305 -0
  118. onetick/py/sources/data_source.py +1045 -0
  119. onetick/py/sources/empty.py +94 -0
  120. onetick/py/sources/odbc.py +337 -0
  121. onetick/py/sources/order_book.py +271 -0
  122. onetick/py/sources/parquet.py +168 -0
  123. onetick/py/sources/pit.py +191 -0
  124. onetick/py/sources/query.py +495 -0
  125. onetick/py/sources/snapshots.py +419 -0
  126. onetick/py/sources/split_query_output_by_symbol.py +198 -0
  127. onetick/py/sources/symbology_mapping.py +123 -0
  128. onetick/py/sources/symbols.py +374 -0
  129. onetick/py/sources/ticks.py +825 -0
  130. onetick/py/sql.py +70 -0
  131. onetick/py/state.py +251 -0
  132. onetick/py/types.py +2131 -0
  133. onetick/py/utils/__init__.py +70 -0
  134. onetick/py/utils/acl.py +93 -0
  135. onetick/py/utils/config.py +186 -0
  136. onetick/py/utils/default.py +49 -0
  137. onetick/py/utils/file.py +38 -0
  138. onetick/py/utils/helpers.py +76 -0
  139. onetick/py/utils/locator.py +94 -0
  140. onetick/py/utils/perf.py +498 -0
  141. onetick/py/utils/query.py +49 -0
  142. onetick/py/utils/render.py +1374 -0
  143. onetick/py/utils/script.py +244 -0
  144. onetick/py/utils/temp.py +471 -0
  145. onetick/py/utils/types.py +120 -0
  146. onetick/py/utils/tz.py +84 -0
  147. onetick_py-1.177.0.dist-info/METADATA +137 -0
  148. onetick_py-1.177.0.dist-info/RECORD +152 -0
  149. onetick_py-1.177.0.dist-info/WHEEL +5 -0
  150. onetick_py-1.177.0.dist-info/entry_points.txt +2 -0
  151. onetick_py-1.177.0.dist-info/licenses/LICENSE +21 -0
  152. onetick_py-1.177.0.dist-info/top_level.txt +2 -0
@@ -0,0 +1,1744 @@
1
+ import os
2
+ import re
3
+ import uuid
4
+ import warnings
5
+ from collections import defaultdict
6
+ from datetime import datetime, date
7
+ import pandas as pd
8
+ from typing import Optional, Tuple
9
+
10
+ from onetick.py.otq import otq
11
+
12
+ import onetick.py.functions
13
+ import onetick.py.sources
14
+ from onetick import py as otp
15
+ from onetick.py import types as ott
16
+ from onetick.py import utils, configuration
17
+ from onetick.py.core._internal._manually_bound_value import _ManuallyBoundValue
18
+ from onetick.py.core._internal._proxy_node import _ProxyNode
19
+ from onetick.py.core._internal._state_vars import StateVars
20
+ from onetick.py.core._source._symbol_param import _SymbolParamColumn, _SymbolParamSource
21
+ from onetick.py.core._source.schema import Schema
22
+ from onetick.py.core._source.symbol import Symbol
23
+ from onetick.py.core._source.tmp_otq import TmpOtq
24
+ from onetick.py.core.column import _Column
25
+ from onetick.py.core.column_operations.base import _Operation, OnetickParameter
26
+ from onetick.py.core.query_inspector import get_query_parameter_list
27
+ from onetick.py.utils import adaptive, adaptive_to_default, default, render_otq
28
+
29
+
30
+ def _is_dict_required(symbols):
31
+ """
32
+ Depending on symbols, determine if output of otp.run() or Source.__call__() should always be a dictionary
33
+ of {symbol: dataframe} even if only one symbol is present in the results
34
+ """
35
+ if isinstance(symbols, (list, tuple)):
36
+ if len(symbols) == 0:
37
+ return False
38
+ elif len(symbols) > 1:
39
+ return True
40
+ else:
41
+ symbols = symbols[0]
42
+
43
+ if isinstance(symbols, otp.Source):
44
+ return True
45
+ if isinstance(symbols, otq.Symbol):
46
+ symbols = symbols.name
47
+ if isinstance(symbols, str) and 'eval' in symbols:
48
+ return True
49
+ return False
50
+
51
+
52
+ class MetaFields:
53
+ """
54
+ OneTick defines several pseudo-columns that can be treated as if they were columns of every tick.
55
+
56
+ These columns can be accessed directly via :py:meth:`onetick.py.Source.__getitem__` method.
57
+
58
+ But in case they are used in :py:class:`~onetick.py.core.column_operations.base.Expr`
59
+ they can be accessed via ``onetick.py.Source.meta_fields``.
60
+
61
+ Examples
62
+ --------
63
+
64
+ Accessing pseudo-fields as columns or as class properties
65
+
66
+ >>> data = otp.Tick(A=1)
67
+ >>> data['X'] = data['_START_TIME']
68
+ >>> data['Y'] = otp.Source.meta_fields['_TIMEZONE']
69
+ >>> otp.run(data, start=otp.dt(2003, 12, 2), timezone='GMT')
70
+ Time A X Y
71
+ 0 2003-12-02 1 2003-12-02 GMT
72
+ """
73
+ def __init__(self):
74
+ self.timestamp = _Column('TIMESTAMP', dtype=ott.nsectime)
75
+ self.time = self.timestamp
76
+ self.start_time = _Column('_START_TIME', dtype=ott.nsectime)
77
+ self.start = self.start_time
78
+ self.end_time = _Column('_END_TIME', dtype=ott.nsectime)
79
+ self.end = self.end_time
80
+ self.timezone = _Column('_TIMEZONE', dtype=str)
81
+ self.db_name = _Column('_DBNAME', dtype=str)
82
+ self.symbol_name = _Column('_SYMBOL_NAME', dtype=str)
83
+ self.tick_type = _Column('_TICK_TYPE', dtype=str)
84
+ self.symbol_time = _Column('_SYMBOL_TIME', dtype=otp.nsectime)
85
+ self.__fields = set(map(str, self.__dict__.values())) | {'Time'}
86
+
87
+ def get_onetick_fields_and_types(self):
88
+ return {
89
+ column.name: column.dtype
90
+ for name, column in self.__dict__.items()
91
+ if not name.startswith('_') and name != 'time'
92
+ }
93
+
94
+ def __iter__(self):
95
+ yield from self.__fields
96
+
97
+ def __contains__(self, item):
98
+ return item in self.__fields
99
+
100
+ def __len__(self):
101
+ return len(self.__fields)
102
+
103
+ def __getitem__(self, item):
104
+ """
105
+ These fields are available:
106
+
107
+ * ``TIMESTAMP`` (or ``Time``)
108
+ * ``START_TIME`` (or ``_START_TIME``)
109
+ * ``END_TIME`` (or ``_END_TIME``)
110
+ * ``TIMEZONE`` (or ``_TIMEZONE``)
111
+ * ``DBNAME`` (or ``_DBNAME``)
112
+ * ``SYMBOL_NAME`` (or ``_SYMBOL_NAME``)
113
+ * ``TICK_TYPE`` (or ``_TICK_TYPE``)
114
+ * ``SYMBOL_TIME`` (or ``_SYMBOL_TIME``)
115
+ """
116
+ return {
117
+ 'TIMESTAMP': self.timestamp,
118
+ 'Time': self.time,
119
+ 'START_TIME': self.start_time,
120
+ '_START_TIME': self.start_time,
121
+ 'END_TIME': self.end_time,
122
+ '_END_TIME': self.end_time,
123
+ 'TIMEZONE': self.timezone,
124
+ '_TIMEZONE': self.timezone,
125
+ 'DB_NAME': self.db_name,
126
+ 'DBNAME': self.db_name,
127
+ '_DBNAME': self.db_name,
128
+ 'SYMBOL_NAME': self.symbol_name,
129
+ '_SYMBOL_NAME': self.symbol_name,
130
+ 'TICK_TYPE': self.tick_type,
131
+ '_TICK_TYPE': self.tick_type,
132
+ 'SYMBOL_TIME': self.symbol_time,
133
+ '_SYMBOL_TIME': self.symbol_time,
134
+ }[item]
135
+
136
+
137
+ class Source:
138
+ """
139
+ Base class for representing Onetick execution graph.
140
+ All :ref:`onetick-py sources <api/sources/root:sources>` are derived from this class
141
+ and have access to all its methods.
142
+
143
+ Examples
144
+ --------
145
+ >>> data = otp.Tick(A=1)
146
+ >>> isinstance(data, otp.Source)
147
+ True
148
+
149
+ Also this class can be used to initialize raw source
150
+ with the help of ``onetick.query`` classes, but
151
+ it should be done with caution as the user is required to set
152
+ such properties as symbol name and tick type manually.
153
+
154
+ >>> data = otp.Source(otq.TickGenerator(bucket_interval=0, fields='long A = 123').tick_type('TT'))
155
+ >>> otp.run(data, symbols='LOCAL::')
156
+ Time A
157
+ 0 2003-12-04 123
158
+ """
159
+
160
+ # TODO: need to support transactions for every _source
161
+ # transaction is set of calls between _source creation and call or between two calls
162
+ # if transaction have the same operations, then it seems we should add only one set of operations
163
+
164
+ _PROPERTIES = [
165
+ "__node",
166
+ "__hash",
167
+ "__sources_keys_dates",
168
+ "__sources_modify_query_times",
169
+ "__sources_base_ep_func",
170
+ "__sources_symbols",
171
+ "__source_has_output",
172
+ "__name",
173
+ "_tmp_otq"
174
+ ]
175
+ _OT_META_FIELDS = ["_START_TIME", "_END_TIME", "_SYMBOL_NAME", "_DBNAME", "_TICK_TYPE", '_TIMEZONE']
176
+ meta_fields = MetaFields()
177
+ Symbol = Symbol # NOSONAR
178
+
179
+ def __init__(
180
+ self,
181
+ node=None,
182
+ schema=None,
183
+ _symbols=None,
184
+ _start=adaptive,
185
+ _end=adaptive,
186
+ _base_ep_func=None,
187
+ _has_output=True,
188
+ **kwargs,
189
+ ):
190
+
191
+ self._tmp_otq = TmpOtq()
192
+ self.__name = None
193
+
194
+ if isinstance(_symbols, OnetickParameter):
195
+ _symbols = _symbols.parameter_expression
196
+
197
+ schema = self._select_schema(schema, kwargs)
198
+
199
+ for key in schema:
200
+ if self._check_key_in_properties(key):
201
+ raise ValueError(f"Can't set class property {key}")
202
+ if self._check_key_is_meta(key):
203
+ if key == 'TIMESTAMP':
204
+ # for backward-compatibility
205
+ warnings.warn(f"Setting meta field {key} in schema is not needed", FutureWarning, stacklevel=2)
206
+ else:
207
+ raise ValueError(f"Can't set meta field {key}")
208
+
209
+ schema.update(
210
+ self.meta_fields.get_onetick_fields_and_types()
211
+ )
212
+
213
+ for key, value in schema.items():
214
+ # calculate value type
215
+ value_type = ott.get_source_base_type(value)
216
+ self.__dict__[key] = _Column(name=key, dtype=value_type, obj_ref=self)
217
+
218
+ # just an alias to Timestamp
219
+ self.__dict__['Time'] = self.__dict__['TIMESTAMP']
220
+ self.__dict__['_state_vars'] = StateVars(self)
221
+
222
+ if node is None:
223
+ node = otq.Passthrough()
224
+
225
+ if isinstance(_symbols, _SymbolParamColumn):
226
+ symbol_node = otq.ModifySymbolName(str(self.Symbol[_symbols.name, str]))
227
+ node = node.sink(symbol_node)
228
+ _symbols = None
229
+
230
+ self.__hash = uuid.uuid4()
231
+ self.__sources_keys_dates = {}
232
+ self.__sources_modify_query_times = {}
233
+ self.__sources_base_ep_func = {}
234
+ self.__sources_symbols = {}
235
+ self.__source_has_output = _has_output
236
+
237
+ if isinstance(node, _ProxyNode):
238
+ self.__node = _ProxyNode(*node.copy_graph(), refresh_func=self.__refresh_hash)
239
+ else:
240
+ self.__node = _ProxyNode(*self.__from_ep_to_proxy(node), refresh_func=self.__refresh_hash)
241
+ self.__sources_keys_dates[self.__node.key()] = (_start, _end)
242
+ self.__sources_modify_query_times[self.__node.key()] = False
243
+ self.__sources_base_ep_func[self.__node.key()] = _base_ep_func
244
+ self.__sources_symbols[self.__node.key()] = _symbols
245
+
246
+ def _try_default_constructor(self, *args, node=None, schema=None, **kwargs):
247
+ if node is not None:
248
+ # Source.copy() method will use this way
249
+ # all info from original source will be copied by copy() method after
250
+ Source.__init__(self, *args, node=node, schema=schema, **kwargs)
251
+ return True
252
+ return False
253
+
254
+ def base_ep(self, **_kwargs):
255
+ # default implementation
256
+ # real implementation should return a Source object
257
+ return None
258
+
259
+ def _select_schema(self, schema, kwargs) -> dict:
260
+ """
261
+ Selects schema definition from ``schema`` or ``kwargs`` parameters
262
+ for the time of deprecation of ``kwargs`` parameter.
263
+ """
264
+ if schema is None:
265
+ if kwargs:
266
+ warnings.warn(
267
+ f'Setting `{self.__class__.__name__}` schema via `**kwargs` is deprecated. '
268
+ 'Please use `schema` parameter for this. '
269
+ f'Passed kwargs are: {kwargs}.',
270
+ FutureWarning,
271
+ stacklevel=2,
272
+ )
273
+ return kwargs
274
+ else:
275
+ return {}
276
+ elif kwargs:
277
+ raise ValueError(
278
+ "Specifying schema through both `**kwargs` and `schema` is prohibited. "
279
+ f"Passed kwargs are: {kwargs}."
280
+ )
281
+
282
+ return schema.copy()
283
+
284
+ def _clean_sources_dates(self):
285
+ self.__sources_keys_dates = {}
286
+ self.__sources_modify_query_times = {}
287
+ self.__sources_base_ep_func = {}
288
+ self.__sources_symbols = {}
289
+
290
+ def _set_sources_dates(self, other, copy_symbols=True):
291
+ self.__sources_keys_dates.update(other._get_sources_dates())
292
+ self.__sources_modify_query_times.update(other._get_sources_modify_query_times())
293
+ self.__sources_base_ep_func.update(other._get_sources_base_ep_func())
294
+ if copy_symbols:
295
+ self.__sources_symbols.update(other._get_sources_symbols())
296
+ else:
297
+ # this branch is applicable for the bound symbols with callbacks,
298
+ # where we drop all adaptive symbols and keep only manually specified
299
+ # symbols
300
+ manually_bound = {
301
+ key: _ManuallyBoundValue(value)
302
+ for key, value in other._get_sources_symbols().items()
303
+ if value is not adaptive and value is not adaptive_to_default
304
+ }
305
+ self.__sources_symbols.update(manually_bound)
306
+
307
+ self.__source_has_output = other._get_source_has_output()
308
+
309
+ def _change_sources_keys(self, keys: dict):
310
+ """
311
+ Change keys in sources dictionaries.
312
+ Need to do it, for example, after rebuilding the node history with new keys.
313
+
314
+ Parameters
315
+ ----------
316
+ keys: dict
317
+ Mapping from old key to new key
318
+ """
319
+ sources = (self.__sources_keys_dates,
320
+ self.__sources_modify_query_times,
321
+ self.__sources_base_ep_func,
322
+ self.__sources_symbols)
323
+ for dictionary in sources:
324
+ for key in list(dictionary):
325
+ dictionary[keys[key]] = dictionary.pop(key)
326
+
327
+ def _get_source_has_output(self):
328
+ return self.__source_has_output
329
+
330
+ def _get_sources_dates(self):
331
+ return self.__sources_keys_dates
332
+
333
+ def _get_sources_modify_query_times(self):
334
+ return self.__sources_modify_query_times
335
+
336
+ def _get_sources_base_ep_func(self):
337
+ return self.__sources_base_ep_func
338
+
339
+ def _get_sources_symbols(self):
340
+ return self.__sources_symbols
341
+
342
+ def _check_key_in_properties(self, key: str) -> bool:
343
+ if key in self.__class__._PROPERTIES:
344
+ return True
345
+ if key.replace('_' + Source.__name__.lstrip('_'), "") in self.__class__._PROPERTIES:
346
+ return True
347
+ if key.replace(self.__class__.__name__, "") in self.__class__._PROPERTIES:
348
+ return True
349
+ return False
350
+
351
+ def _check_key_is_meta(self, key: str) -> bool:
352
+ return key in self.__class__.meta_fields
353
+
354
+ def _check_key_is_reserved(self, key: str) -> bool:
355
+ return self._check_key_in_properties(key) or self._check_key_is_meta(key)
356
+
357
+ def _set_field_by_tuple(self, name, dtype):
358
+ warnings.warn('Using _set_field_by_tuple() is not recommended,'
359
+ ' change your code to use otp.Source.schema object.', DeprecationWarning)
360
+ if name not in self.__dict__:
361
+ if dtype is None:
362
+ raise KeyError(f'Column name {name} is not in the schema. Please, check that this column '
363
+ 'is in the schema or add it using the .schema property')
364
+
365
+ if name in (0, 1):
366
+ raise ValueError(f"constant {name} are not supported for indexing for now, please use otp.Empty")
367
+ if isinstance(name, (int, float)):
368
+ raise ValueError("integer indexes are not supported")
369
+ self.__dict__[name] = _Column(name, dtype, self)
370
+ else:
371
+ if not isinstance(self.__dict__[name], _Column):
372
+ raise AttributeError(f"There is no '{name}' column")
373
+
374
+ if dtype:
375
+ type1, type2 = self.__dict__[name].dtype, dtype
376
+ b_type1, b_type2 = ott.get_base_type(type1), ott.get_base_type(type2)
377
+
378
+ if b_type1 != b_type2:
379
+ if {type1, type2} == {int, float}:
380
+ self.__dict__[name]._dtype = float
381
+ else:
382
+ raise Warning(
383
+ f"Column '{name}' was declared as '{type1}', but you want to change it to '{type2}', "
384
+ "that is not possible without setting type directly via assigning value"
385
+ )
386
+
387
+ else:
388
+ if issubclass(b_type1, str):
389
+ t1_length = ott.string.DEFAULT_LENGTH if type1 is str else type1.length
390
+ t2_length = ott.string.DEFAULT_LENGTH if type2 is str else type2.length
391
+
392
+ self.__dict__[name]._dtype = type2 if t1_length < t2_length else type1
393
+ if {type1, type2} == {ott.nsectime, ott.msectime}:
394
+ self.__dict__[name]._dtype = ott.nsectime
395
+ return self.__dict__[name]
396
+
397
+ def _hash(self):
398
+ return self.__hash
399
+
400
+ def _merge_tmp_otq(self, source):
401
+ self._tmp_otq.merge(source._tmp_otq)
402
+
403
+ def __prepare_graph(self, symbols=None, start=None, end=None, has_output=False):
404
+ # We copy object here, because we will change it according to passed
405
+ # symbols and date ranges. For example, we can add modify_query_times EP
406
+ # if it is necessary
407
+
408
+ obj = self.copy()
409
+ if has_output:
410
+ obj.sink(otq.Passthrough())
411
+ start, end, symbols = obj._set_date_range_and_symbols(symbols, start, end)
412
+ if start is adaptive:
413
+ start = None
414
+ if end is adaptive:
415
+ end = None
416
+ if symbols is not None and isinstance(symbols, pd.DataFrame):
417
+ symbols = utils.get_symbol_list_from_df(symbols)
418
+ if symbols is not None and not isinstance(symbols, list):
419
+ symbols = [symbols]
420
+ elif symbols is None:
421
+ symbols = []
422
+ _symbols = []
423
+ for sym in symbols:
424
+ _symbols.append(self._convert_symbol_to_string(sym, tmp_otq=obj._tmp_otq, start=start, end=end))
425
+
426
+ return obj, start, end, _symbols
427
+
428
+ def to_otq(self, file_name=None, file_suffix=None, query_name=None, symbols=None, start=None, end=None,
429
+ timezone=None, raw=None, add_passthrough=True,
430
+ running=False,
431
+ start_time_expression=None,
432
+ end_time_expression=None,
433
+ symbol_date=None):
434
+ """
435
+ Save data source to .otq file and return path to the saved file.
436
+
437
+ Parameters
438
+ ----------
439
+ file_name: str
440
+ Absolute or relative path to the saved file.
441
+ If ``None``, create temporary file and name it randomly.
442
+ file_suffix: str
443
+ Suffix to add to the saved file name (including extension).
444
+ Can be specified if ``file_name`` is ``None``
445
+ to distinguish between different temporary files.
446
+ Default: ".to_otq.otq"
447
+ query_name: str
448
+ Name of the main query in the created file.
449
+ If ``None``, take name from this Source object.
450
+ If that name is empty, set name to "query".
451
+ symbols: str, list, :pandas:`DataFrame <pandas.DataFrame>`, :class:`Source`
452
+ symbols to save query with
453
+ start: :py:class:`otp.datetime <onetick.py.datetime>`
454
+ start time to save query with
455
+ end: :py:class:`otp.datetime <onetick.py.datetime>`
456
+ end time to save query with
457
+ timezone: str
458
+ timezone to save query with
459
+ raw
460
+
461
+ .. deprecated:: 1.4.17
462
+
463
+ add_passthrough: bool
464
+ will add a :py:class:`onetick.query.Passthrough` event processor at the end of the resulting graph
465
+ running: bool
466
+ Indicates whether a query is CEP or not.
467
+ start_time_expression: str, :py:class:`~onetick.py.Operation`, optional
468
+ Start time onetick expression of the query. If specified, it will take precedence over ``start``.
469
+ end_time_expression: str, :py:class:`~onetick.py.Operation`, optional
470
+ End time onetick expression of the query. If specified, it will take precedence over ``end``.
471
+ symbol_date: :py:class:`otp.datetime <onetick.py.datetime>` or :py:class:`datetime.datetime` or int
472
+ Symbol date for the query or integer in the YYYYMMDD format.
473
+ Will be applied only to the main query.
474
+
475
+ Returns
476
+ -------
477
+
478
+ result: str
479
+ Relative (if ``file_name`` is relative) or absolute path to the created query
480
+ in the format ``file_name::query_name``
481
+ """
482
+ if raw is not None:
483
+ warnings.warn('The "raw" flag is deprecated and makes no effect', FutureWarning)
484
+
485
+ if timezone is None:
486
+ timezone = configuration.config.tz
487
+
488
+ file_path = str(file_name) if file_name is not None else None
489
+ if file_suffix is None:
490
+ file_suffix = self._name_suffix('to_otq.otq')
491
+
492
+ if query_name is None:
493
+ query_name = self.get_name(remove_invalid_symbols=True)
494
+ if query_name is None:
495
+ query_name = 'query'
496
+
497
+ if isinstance(start, _Operation) and start_time_expression is None:
498
+ start_time_expression = str(start)
499
+ start = utils.adaptive
500
+ if isinstance(end, _Operation) and end_time_expression is None:
501
+ end_time_expression = str(end)
502
+ end = utils.adaptive
503
+
504
+ if isinstance(start_time_expression, _Operation):
505
+ start_time_expression = str(start_time_expression)
506
+ if isinstance(end_time_expression, _Operation):
507
+ end_time_expression = str(end_time_expression)
508
+
509
+ obj, start, end, symbols = self.__prepare_graph(symbols, start, end)
510
+
511
+ graph = obj._to_graph(add_passthrough=add_passthrough)
512
+ graph.set_symbols(symbols)
513
+
514
+ return obj._tmp_otq.save_to_file(query=graph, query_name=query_name, file_path=file_path,
515
+ file_suffix=file_suffix, start=start, end=end, timezone=timezone,
516
+ running_query_flag=running,
517
+ start_time_expression=start_time_expression,
518
+ end_time_expression=end_time_expression,
519
+ symbol_date=symbol_date)
520
+
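# Usage sketch for to_otq() (illustration, not part of the package source); assumes a configured
# otp session and uses the otp names imported at the top of this module. The file and query
# names below are hypothetical.
data = otp.Tick(A=1)
path = data.to_otq(file_name='my_queries.otq', query_name='single_tick')
# `path` has the form 'my_queries.otq::single_tick' and can be referenced from other queries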
521
+ def _store_in_tmp_otq(self, tmp_otq, operation_suffix="tmp_query", symbols=None, start=None, end=None,
522
+ raw=None, add_passthrough=True, name=None, timezone=None, symbol_date=None):
523
+ """
524
+ Adds this source to the tmp_otq storage
525
+
526
+ Parameters
527
+ ----------
528
+ tmp_otq: TmpOtq
529
+ Storage object
530
+ operation_suffix: str
531
+ Suffix string to be added to the autogenerated graph name in the otq file
532
+ name: str, optional
533
+ If specified, this ``name`` will be used to save query
534
+ and ``suffix`` parameter will be ignored.
535
+
536
+ Returns
537
+ -------
538
+ result: str
539
+ String with the name of the saved graph (starting with THIS::)
540
+ """
541
+ if raw is not None:
542
+ warnings.warn('The "raw" flag is deprecated and makes no effect', FutureWarning)
543
+
544
+ obj, start, end, symbols = self.__prepare_graph(symbols, start, end)
545
+ tmp_otq.merge(obj._tmp_otq)
546
+
547
+ if isinstance(start, ott.dt): # OT save_to_file checks for the datetime time
548
+ start = datetime.fromtimestamp(start.timestamp())
549
+ elif isinstance(start, date):
550
+ start = datetime(start.year, start.month, start.day)
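# Sketch of the behaviour this helper encodes (illustration, not part of the package source):
# with more than one symbol, otp.run() returns a dictionary keyed by symbol name.
data = otp.Tick(A=1)
result = otp.run(data, symbols=['AAA', 'BBB'])  # hypothetical symbol names
# result == {'AAA': <DataFrame>, 'BBB': <DataFrame>}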
551
+ if isinstance(end, ott.dt):
552
+ end = datetime.fromtimestamp(end.timestamp())
553
+ elif isinstance(end, date):
554
+ end = datetime(end.year, end.month, end.day)
555
+
556
+ if timezone is None:
557
+ timezone = configuration.config.tz
558
+
559
+ graph = obj._to_graph(add_passthrough=add_passthrough)
560
+ graph.set_start_time(start)
561
+ graph.set_end_time(end)
562
+ graph.set_symbols(symbols)
563
+ if timezone is not None:
564
+ if otq.webapi:
565
+ graph.set_timezone(timezone)
566
+ else:
567
+ graph.time_interval_properties().set_timezone(timezone)
568
+
569
+ params = {'symbol_date': symbol_date} if symbol_date is not None else {}
570
+ suffix = self._name_suffix(suffix=operation_suffix, separator='__', remove_invalid_symbols=True)
571
+ return tmp_otq.add_query(graph, suffix=suffix, name=name, params=params)
572
+
573
+ def __refresh_hash(self):
574
+ """
575
+ This internal function refreshes hash for every graph modification.
576
+ It is used only in _ProxyNode, because it tracks nodes changes
577
+ """
578
+ self.__hash = uuid.uuid3(uuid.NAMESPACE_DNS, str(self.__hash))
579
+
580
+ def _prepare_for_execution(self, symbols=None, start=None, end=None, start_time_expression=None,
581
+ end_time_expression=None, timezone=None, has_output=None,
582
+ running_query_flag=None, require_dict=False, node_name=None,
583
+ symbol_date=None):
584
+ if has_output is None:
585
+ has_output = self.__source_has_output
586
+
587
+ if timezone is None:
588
+ timezone = configuration.config.tz
589
+
590
+ obj, start, end, symbols = self.__prepare_graph(symbols, start, end, has_output)
591
+ require_dict = require_dict or _is_dict_required(symbols)
592
+
593
+ if node_name is None:
594
+ node_name = 'SOURCE_CALL_MAIN_OUT_NODE'
595
+ obj.node().node_name(node_name)
596
+
597
+ graph = obj._to_graph(add_passthrough=False)
598
+
599
+ graph.set_symbols(symbols)
600
+
601
+ # create name and suffix for generated .otq file
602
+ if otp.config.main_query_generated_filename:
603
+ name = otp.config.main_query_generated_filename
604
+ if name.endswith('.otq'):
605
+ suffix = ''
606
+ else:
607
+ suffix = '.otq'
608
+ force = True
609
+ else:
610
+ name = ''
611
+ suffix = self._name_suffix('run.otq')
612
+ force = False
613
+
614
+ clean_up = default
615
+ if otp.config.otq_debug_mode:
616
+ clean_up = False
617
+ base_dir = None
618
+ if os.getenv('OTP_WEBAPI_TEST_MODE'):
619
+ from onetick.py.otq import _tmp_otq_path
620
+ base_dir = _tmp_otq_path()
621
+ tmp_file = utils.TmpFile(name=name,
622
+ suffix=suffix,
623
+ force=force,
624
+ base_dir=base_dir,
625
+ clean_up=clean_up)
626
+
627
+ query_to_run = obj._tmp_otq.save_to_file(query=graph,
628
+ query_name=self.get_name(remove_invalid_symbols=True)
629
+ if self.get_name(remove_invalid_symbols=True) else "main_query",
630
+ file_path=tmp_file.path,
631
+ start=start, end=end,
632
+ running_query_flag=running_query_flag,
633
+ start_time_expression=start_time_expression,
634
+ end_time_expression=end_time_expression,
635
+ timezone=timezone,
636
+ symbol_date=symbol_date)
637
+
638
+ return query_to_run, require_dict, node_name
639
+
640
+ def __call__(self, *args, **kwargs):
641
+ """
642
+ .. deprecated:: 1.48.3
643
+ Use :py:func:`otp.run <onetick.py.run>` instead.
644
+ """
645
+ warnings.warn('__call__() method is deprecated, use otp.run() instead', FutureWarning, stacklevel=2)
646
+ return otp.run(self, *args, **kwargs)
647
+
648
+ def to_df(self, symbols=None, **kwargs):
649
+ """
650
+ .. deprecated:: 1.48.3
651
+ Use :py:func:`otp.run <onetick.py.run>` instead.
652
+ """
653
+ warnings.warn('to_df() method is deprecated, use otp.run() instead', FutureWarning, stacklevel=2)
654
+ # For backward compatibility: otp.run() does not accept "symbols" as a non-keyword argument
655
+ if symbols is not None:
656
+ kwargs['symbols'] = symbols
657
+ return otp.run(self, **kwargs)
658
+
659
+ to_dataframe = to_df
660
+
661
+ def print_api_graph(self):
662
+ self.node().copy_graph(print_out=True)
663
+
664
+ def _add_table(self, strict=False):
665
+ table = otq.Table(
666
+ fields=",".join(
667
+ ott.type2str(dtype) + " " + name for name, dtype in self.columns(skip_meta_fields=True).items()
668
+ ),
669
+ keep_input_fields=not strict,
670
+ )
671
+ self.sink(table)
672
+
673
+ def _is_unbound_required(self):
674
+ """ Check whether a graph needs unbound symbol or not """
675
+
676
+ for symbol in self.__sources_symbols.values():
677
+ if symbol is adaptive or symbol is adaptive_to_default:
678
+ return True
679
+ return False
680
+
681
+ def _get_widest_time_range(self):
682
+ """
683
+ Get minimum start time and maximum end time.
684
+ If time is not found, None is returned.
685
+ """
686
+ start_times = []
687
+ end_times = []
688
+
689
+ for start, end in self.__sources_keys_dates.values():
690
+ if start is not adaptive:
691
+ start_times.append(start)
692
+ if end is not adaptive:
693
+ end_times.append(end)
694
+
695
+ start = min(start_times) if start_times else None
696
+ end = max(end_times) if end_times else None
697
+ return start, end
698
+
699
+ def __get_common_symbol(self):
700
+ need_to_bind_symbol = False
701
+ common_symbol = None
702
+
703
+ # let's try to understand whether we could use a common symbol for all sources
704
+ # or we need to bind symbols instead
705
+ first_symbol = None
706
+
707
+ for symbol in self.__sources_symbols.values():
708
+ if first_symbol is None:
709
+ first_symbol = symbol
710
+
711
+ if isinstance(first_symbol, _ManuallyBoundValue):
712
+ # Mark that we need to bind, but keep common_symbol equal to None.
713
+ # It is applicable for the bound symbols inside the merge with bound
714
+ # symbols, for example.
715
+ need_to_bind_symbol = True
716
+ else:
717
+ common_symbol = symbol
718
+
719
+ continue
720
+
721
+ if symbol and symbol != first_symbol:
722
+ need_to_bind_symbol = True
723
+ common_symbol = None
724
+ break
725
+
726
+ # symbol is specified nowhere - just set unbound to the default one
727
+ if (first_symbol is adaptive or first_symbol is adaptive_to_default) and (
728
+ common_symbol is adaptive or common_symbol is adaptive_to_default
729
+ ):
730
+ common_symbol = configuration.config.default_symbol
731
+
732
+ return common_symbol, need_to_bind_symbol
733
+
734
+ def __get_modify_query_times(self, key, start, end, sources_start, sources_end):
735
+ # determine whether we have to add modify_query_times to a src
736
+
737
+ if self.__sources_modify_query_times[key]:
738
+ return None
739
+
740
+ start_date, end_date = self.__sources_keys_dates[key]
741
+
742
+ if start_date is adaptive and end_date is adaptive:
743
+ return None
744
+
745
+ # if one of the ends is specified, then it means
746
+ # we need to check whether it is worth wrapping into the modify_query_times
747
+ if start_date is adaptive:
748
+ if start is None:
749
+ start_date = sources_start
750
+ else:
751
+ start_date = start
752
+
753
+ if end_date is adaptive:
754
+ if end is None:
755
+ end_date = sources_end
756
+ else:
757
+ end_date = end
758
+
759
+ if start_date is adaptive or end_date is adaptive:
760
+ return None
761
+
762
+ # it might happen when either sources_start/end are adaptive
763
+ # or start/end are adaptive
764
+ if (
765
+ (start is None and sources_start is not adaptive and start_date != sources_start)
766
+ or (start is not None and start_date != start)
767
+ or (end is None and sources_end is not adaptive and end_date != sources_end)
768
+ or (end is not None and end_date != end)
769
+ ):
770
+ mqt_format = 'parse_time("%Y-%m-%d %H:%M:%S.%q","{}", _TIMEZONE)'
771
+ mqt_ep = otq.ModifyQueryTimes(
772
+ start_time=mqt_format.format(start_date.strftime("%Y-%m-%d %H:%M:%S.%f")),
773
+ end_time=mqt_format.format(end_date.strftime("%Y-%m-%d %H:%M:%S.%f")),
774
+ output_timestamp="TIMESTAMP",
775
+ )
776
+
777
+ return mqt_ep
778
+ return None
779
+
780
+ def _set_date_range_and_symbols(self, symbols=None, start=None, end=None):
781
+ # will modify self
782
+
783
+ if symbols is None:
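# Sketch of the deprecation handled above (illustration, not part of the package source):
# schema fields passed via **kwargs still work but emit a FutureWarning.
node = otq.TickGenerator(bucket_interval=0, fields='long A = 123').tick_type('TT')
deprecated = otp.Source(node, A=int)             # schema via **kwargs emits a FutureWarning
preferred = otp.Source(node, schema={'A': int})  # explicit `schema` parameter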
784
+ common_symbol, need_to_bind_symbol = self.__get_common_symbol()
785
+ else:
786
+ # when unbound symbols passed
787
+ common_symbol = symbols
788
+ need_to_bind_symbol = True # used to check whether any of the sources has bound symbols
789
+
790
+ # Find max and min for _source data ranges
791
+ sources_start, sources_end = self._get_widest_time_range()
792
+ sources_start = sources_start or configuration.config.get('default_start_time', adaptive)
793
+ sources_end = sources_end or configuration.config.get('default_end_time', adaptive)
794
+
795
+ for key in self.__sources_keys_dates:
796
+
797
+ # find a function that builds _source
798
+ func = self.__sources_base_ep_func[key]
799
+ if not func:
800
+ continue
801
+
802
+ src = func()
803
+
804
+ mqt_ep = self.__get_modify_query_times(key, start, end, sources_start, sources_end)
805
+ if mqt_ep:
806
+ self.__sources_modify_query_times[key] = True
807
+ src.sink(mqt_ep)
808
+
809
+ if need_to_bind_symbol:
810
+ bound = None
811
+ if key in self.__sources_symbols: # TODO: this is wrong, we need to know about symbols
812
+ # it happens when we do not copy symbols when applying
813
+ # merge with bound symbols.
814
+ # Wrong, in that case merge with bound symbol is
815
+ # indistinguishable from the manually passed None
816
+ # for external queries
817
+ bound = self.__sources_symbols[key]
818
+ if isinstance(bound, _ManuallyBoundValue):
819
+ bound = bound.value
820
+
821
+ if bound and bound is not adaptive and bound is not adaptive_to_default:
822
+ src.__node.symbol(bound)
823
+ else:
824
+ # if key is not in __sources_symbols, then
825
+ # it means that symbol was not specified, and
826
+ # therefore use unbound symbol
827
+ if common_symbol is None:
828
+ if bound is adaptive_to_default:
829
+ src.__node.symbol(configuration.config.default_symbol)
830
+ # TODO: write a test validating this
831
+ # else:
832
+ # raise Exception("One of the branch does not have symbol specified")
833
+
834
+ # --------------------------
835
+ # glue _source with the main graph
836
+ self.node().add_rules(src.node().copy_rules())
837
+ self.source_by_key(src.node().copy_graph(), key)
838
+ self._merge_tmp_otq(src)
839
+
840
+ if start is None:
841
+ start = sources_start
842
+
843
+ if end is None:
844
+ end = sources_end
845
+
846
+ return start, end, common_symbol
847
+
848
+ def _to_graph(self, add_passthrough=True):
849
+ """
850
+ Construct the graph. Only for internal usage.
851
+
852
+ It is private, because it constructs the raw graph assuming that a graph
853
+ is already defined, and might confuse an end user, because by default Source
854
+ is not fully defined; it becomes fully defined only when symbols, start and
855
+ end datetime are specified.
856
+ """
857
+ constructed_obj = self.copy()
858
+
859
+ # we add it for case when the last EP has a pin output
860
+ if add_passthrough:
861
+ constructed_obj.sink(otq.Passthrough())
862
+
863
+ return otq.GraphQuery(constructed_obj.node().get())
864
+
865
+ def to_graph(self, raw=None, symbols=None, start=None, end=None, *, add_passthrough=True):
866
+ """
867
+ Construct an :py:class:`onetick.query.GraphQuery` object.
868
+
869
+ Parameters
870
+ ----------
871
+ raw:
872
+ .. deprecated:: 1.4.17 has no effect
873
+
874
+ symbols:
875
+ symbols to add to the otq.GraphQuery
876
+ start: :py:class:`otp.datetime <onetick.py.datetime>`
877
+ start time of a query
878
+ end: :py:class:`otp.datetime <onetick.py.datetime>`
879
+ end time of a query
880
+ add_passthrough: bool
881
+ add an additional :py:class:`onetick.query.Passthrough` event processor to the end of the resulting graph
882
+
883
+ Returns
884
+ -------
885
+ otq.GraphQuery
886
+
887
+ See Also
888
+ --------
889
+ :meth:`render`
890
+ """
891
+
892
+ if raw is not None:
893
+ warnings.warn('The "raw" flag is deprecated and makes no effect', FutureWarning)
894
+
895
+ _obj, _start, _end, _symbols = self.__prepare_graph(symbols, start, end)
896
+
897
+ if _obj._tmp_otq.queries:
898
+ warnings.warn('Using .to_graph() for a Source object that uses sub-queries! '
899
+ 'This operation is deprecated and is not guaranteed to work as expected. '
900
+ 'Such a Source should be executed using otp.run() or saved to disk using to_otq()',
901
+ FutureWarning)
902
+ _obj.sink(otq.Passthrough().output_pin_name('OUT_FOR_TO_GRAPH'))
903
+ _graph = _obj._to_graph(add_passthrough=False)
904
+ _graph.set_start_time(_start)
905
+ _graph.set_end_time(_end)
906
+ _graph.set_symbols(_symbols)
907
+
908
+ query = _obj._tmp_otq.save_to_file(query=_graph, file_suffix='_to_graph.otq')
909
+ query_path, query_name = query.split('::')
910
+ query_params = get_query_parameter_list(query_path, query_name)
911
+
912
+ source_with_nested_query = otp.Query(otp.query(query,
913
+ **{param: f'${param}' for param in query_params}),
914
+ out_pin='OUT_FOR_TO_GRAPH')
915
+ return source_with_nested_query.to_graph(
916
+ symbols=_symbols, start=_start, end=_end,
917
+ add_passthrough=add_passthrough)
918
+ else:
919
+ return _obj._to_graph(add_passthrough=add_passthrough)
920
+
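# Minimal sketch of building an onetick.query graph object (illustration, not part of the
# package source); the result can be rendered or passed to onetick.query tooling.
data = otp.Tick(A=1)
graph = data.to_graph()  # otq.GraphQuery built from the source's calculation graph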
921
+ def render(self, **kwargs):
922
+ """
923
+ Renders a calculation graph using the ``graphviz`` library.
924
+ Every node is a OneTick query language event processor.
925
+ Nodes in nested queries, first stage queries and eval queries are not shown.
926
+ Could be useful for debugging and in Jupyter for exploring the underlying graph.
927
+
928
+ Note that it's required to have :graphviz:`graphviz <>` package installed.
929
+
930
+ Examples
931
+ --------
932
+ >>> data = otp.Tick(X=3)
933
+ >>> data1, data2 = data[(data['X'] > 2)]
934
+ >>> data = otp.merge([data1, data2])
935
+ >>> data.render() # doctest: +SKIP
936
+
937
+ .. graphviz:: ../../static/render_example.dot
938
+ """
939
+ kwargs.setdefault('verbose', True)
940
+ self.to_graph().render(**kwargs)
941
+
942
+ def render_otq(
943
+ self,
944
+ image_path: Optional[str] = None,
945
+ output_format: Optional[str] = None,
946
+ load_external_otqs: bool = True,
947
+ view: bool = False,
948
+ line_limit: Optional[Tuple[int, int]] = (10, 30),
949
+ parse_eval_from_params: bool = False,
950
+ render_debug_info: bool = False,
951
+ debug: bool = False,
952
+ graphviz_compat_mode: bool = False,
953
+ **kwargs,
954
+ ):
955
+ """
956
+ Render current :py:class:`~onetick.py.Source` graph.
957
+
958
+ Parameters
959
+ ----------
960
+ image_path: str, None
961
+ Path for the generated image. If omitted, the image will be saved in a temporary directory.
962
+ output_format: str, None
963
+ `Graphviz` rendering format. Default: `png`.
964
+ If `image_path` contains one of the following extensions, `output_format` will be set automatically:
965
+ `png`, `svg`, `dot`.
966
+ load_external_otqs: bool
967
+ If set to `True` (default) dependencies from external .otq files (not listed in ``path`` param)
968
+ will be loaded automatically.
969
+ view: bool
970
+ Defines whether the generated image should be shown after rendering.
971
+ line_limit: Tuple[int, int], None
972
+ Limit on the maximum number of lines and the length of some EP parameter strings.
973
+ The first value limits the number of lines, the second limits the number of characters per line.
974
+ If set to None, the limit is disabled.
975
+ If one of the tuple values is set to zero, the corresponding limit is disabled.
976
+ parse_eval_from_params: bool
977
+ Enable parsing and printing `eval` sub-queries from EP parameters.
978
+ render_debug_info: bool
979
+ Render additional debug information.
980
+ debug: bool
981
+ Allow printing stdout or stderr from the `Graphviz` render.
982
+ graphviz_compat_mode: bool
983
+ Change internal parameters of the resulting graph for better compatibility with old `Graphviz` versions.
984
+ Could produce larger and less readable graphs.
985
+ kwargs:
986
+ Additional arguments to be passed to :py:meth:`onetick.py.Source.to_otq` method (except
987
+ ``file_name``, ``file_suffix`` and ``query_name`` parameters)
988
+
989
+ Returns
990
+ -------
991
+ Path to rendered image
992
+
993
+ See also
994
+ --------
995
+ :py:func:`render_otq <onetick.py.utils.render_otq>`
996
+
997
+ Examples
998
+ --------
999
+
1000
+ >>> data = otp.DataSource(db='US_COMP', tick_type='TRD', symbols='AAA') # doctest: +SKIP
1001
+ >>> data1, data2 = data[(data['PRICE'] > 50)] # doctest: +SKIP
1002
+ >>> data = otp.merge([data1, data2]) # doctest: +SKIP
1003
+ >>> data.render_otq('./path/to/image.png') # doctest: +SKIP
1004
+
1005
+ .. image:: ../../static/testing/images/render_otq_3.png
1006
+ """
1007
+
1008
+ if {'file_name', 'file_suffix', 'query_name'} & kwargs.keys():
1009
+ raise ValueError(
1010
+ 'It\'s not allowed to pass parameters `file_name`, `file_suffix` and `query_name` as `kwargs` '
1011
+ 'in `render_otq` method.'
1012
+ )
1013
+
1014
+ otq_path = self.to_otq(**kwargs)
1015
+ return render_otq(
1016
+ otq_path, image_path, output_format, load_external_otqs, view, line_limit, parse_eval_from_params,
1017
+ render_debug_info, debug, graphviz_compat_mode,
1018
+ )
1019
+
1020
+ def copy(self, ep=None, columns=None, deep=False) -> 'Source':
1021
+ """
1022
+ Build an object with copied calculation graph.
1023
+
1024
+ Every node of the resulting graph has the same id as in the original. It means that
1025
+ if the original and copied graphs are later merged or joined together, then all common
1026
+ nodes (all that were created before the .copy() call) will be glued.
1027
+
1028
+ For example, let's imagine that you have the following calculation graph ``G``
1029
+
1030
+ .. graphviz::
1031
+
1032
+ digraph {
1033
+ rankdir="LR";
1034
+ A -> B;
1035
+ }
1036
+
1037
+ where ``A`` is a source and ``B`` is some operation on it.
1038
+
1039
+ Then we copy it to the ``G'`` and assign a new operation there
1040
+
1041
+ .. graphviz::
1042
+
1043
+ digraph {
1044
+ rankdir="LR";
1045
+ A -> B -> C;
1046
+ }
1047
+
1048
+ After that we decided to merge ``G`` and ``G'``. The resulting calculation graph will be:
1049
+
1050
+ .. graphviz::
1051
+
1052
+ digraph {
1053
+ rankdir="LR";
1054
+ A -> B -> C -> MERGE;
1055
+ B -> MERGE;
1056
+ }
1057
+
1058
+ Please use the :meth:`Source.deepcopy` if you want to get the following calculation graph after merges and joins
1059
+
1060
+ .. graphviz::
1061
+
1062
+ digraph {
1063
+ rankdir="LR";
1064
+ A -> B -> C -> MERGE;
1065
+ "A'" -> "B'" -> "C'" -> MERGE;
1066
+ }
1067
+
1068
+ Returns
1069
+ -------
1070
+ Source
1071
+
1072
+ See Also
1073
+ --------
1074
+ Source.deepcopy
1075
+ """
1076
+ if columns is None:
1077
+ columns = self.columns(skip_meta_fields=True)
1078
+
1079
+ if ep:
1080
+ result = self.__class__(node=ep, schema=columns)
1081
+ result.source(self.node().copy_graph())
1082
+ # we need to clean it, because ep is not a _source
1083
+ result._clean_sources_dates()
1084
+ else:
1085
+ result = self.__class__(node=self.node(), schema=columns)
1086
+
1087
+ result.node().add_rules(self.node().copy_rules(deep=deep))
1088
+ result._set_sources_dates(self)
1089
+ if deep:
1090
+ # generating all new uuids for node history and for sources
1091
+ # after they were initialized
1092
+ keys = defaultdict(uuid.uuid4) # type: ignore
1093
+ result.node().rebuild_graph(keys)
1094
+ result._change_sources_keys(keys)
1095
+
1096
+ # add state
1097
+ result._copy_state_vars_from(self)
1098
+
1099
+ result._tmp_otq = self._tmp_otq.copy()
1100
+ result.__name = self.__name #noqa
1101
+
1102
+ result._copy_properties_from(self)
1103
+
1104
+ return result
1105
+
1106
+ def deepcopy(self, ep=None, columns=None) -> 'onetick.py.Source':
1107
+ """
1108
+ Copy the whole graph and change the ids of every node.
1109
+ More details can be found in :meth:`Source.copy`.
1110
+
1111
+ See Also
1112
+ --------
1113
+ Source.copy
1114
+ """
1115
+ return self.copy(ep, columns, deep=True)
1116
+
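# Sketch contrasting copy() and deepcopy() (illustration, not part of the package source).
# copy() keeps node ids, so nodes shared with the original are glued on a later merge;
# deepcopy() regenerates ids, keeping the branches independent.
data = otp.Tick(A=1)
shallow = data.copy()
deep = data.deepcopy()
merged_glued = otp.merge([data, shallow])      # common history appears once
merged_separate = otp.merge([data, deep])      # two independent branches feed the merge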
1117
+ def _copy_properties_from(self, obj):
1118
+ # needed if we are doing copy of a child with custom properties
1119
+ for attr in set(self.__class__._PROPERTIES) - set(Source._PROPERTIES):
1120
+ setattr(self, attr, getattr(obj, attr))
1121
+
1122
+ def _copy_state_vars_from(self, objs):
1123
+ self.__dict__["_state_vars"] = StateVars(self, objs)
1124
+
1125
+ def columns(self, skip_meta_fields=False):
1126
+ """
1127
+ Return the columns of the data source.
1128
+
1129
+ Parameters
1130
+ ----------
1131
+ skip_meta_fields: bool, default=False
1132
+ do not add meta fields
1133
+ Returns
1134
+ -------
1135
+ dict
1136
+ """
1137
+ result = {}
1138
+
1139
+ for key, value in self.__dict__.items():
1140
+ if skip_meta_fields and self._check_key_is_meta(key):
1141
+ continue
1142
+
1143
+ if self._check_key_in_properties(key):
1144
+ continue
1145
+
1146
+ if isinstance(value, _Column):
1147
+ result[value.name] = value.dtype
1148
+
1149
+ return result
1150
+
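# Usage sketch (illustration, not part of the package source): inspecting the python-side schema.
data = otp.Tick(A=1, B='x')
data.columns(skip_meta_fields=True)  # expected: {'A': <class 'int'>, 'B': <class 'str'>}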
1151
+ def drop_columns(self):
1152
+ """
1153
+ This method removes all columns from the Python representation, but doesn't
1154
+ drop columns from the data.
1155
+
1156
+ It is used when an external query is applied, because we don't know how
1157
+ the data schema has changed.
1158
+ """
1159
+
1160
+ items = []
1161
+
1162
+ for key, value in self.__dict__.items():
1163
+ if self._check_key_is_reserved(key):
1164
+ continue
1165
+
1166
+ if isinstance(value, _Column):
1167
+ items.append(key)
1168
+
1169
+ for item in items:
1170
+ del self.__dict__[item]
1171
+
1172
+ def node(self):
1173
+ return self.__node
1174
+
1175
+ def tick_type(self, tt):
1176
+ self.__node.tick_type(tt)
1177
+ return self
1178
+
1179
+ def symbol(self, symbol): # NOSONAR
1180
+ """
1181
+ Apply symbol to graph
1182
+
1183
+ .. deprecated:: 1.3.31
1184
+
1185
+ """
1186
+ warnings.warn("symbol method is deprecated, please specify symbol during creation", FutureWarning)
1187
+ self.__node.symbol(symbol)
1188
+ return self
1189
+
1190
+ def node_name(self, name=None, key=None):
1191
+ return self.__node.node_name(name, key)
1192
+
1193
+ def _fix_varstrings(self):
1194
+ """
1195
+ PY-556: converting to varstring results in string with null-characters
1196
+ """
1197
+ varstring_columns = {
1198
+ name: self[name]
1199
+ for name, dtype in self.schema.items()
1200
+ if dtype is ott.varstring
1201
+ }
1202
+ # just updating the column removes null-characters
1203
+ if varstring_columns:
1204
+ self.update(varstring_columns, inplace=True)
1205
+
1206
+ def __from_ep_to_proxy(self, ep):
1207
+ in_pin, out_pin = None, None
1208
+ if isinstance(ep, otq.graph_components.EpBase.PinnedEp):
1209
+ if hasattr(ep, "_output_name"):
1210
+ out_pin = getattr(ep, "_output_name")
1211
+ else:
1212
+ in_pin = getattr(ep, "_input_name")
1213
+
1214
+ ep = getattr(ep, "_ep")
1215
+
1216
+ return ep, uuid.uuid4(), in_pin, out_pin
1217
+
1218
+ def sink(self, ep, out_pin=None, inplace: bool = True):
1219
+ """
1220
+ Appends ``ep`` node to this source (inplace by default).
1221
+ Connects ``out_pin`` of this source to ``ep``.
1222
+
1223
+ Can be used to connect onetick.query objects to :class:`onetick.py.Source`.
1224
+
1225
+ Data schema changes (added or deleted columns) will not be detected automatically
1226
+ after applying this function, so the user must change the schema manually
1227
+ by updating the :meth:`onetick.py.Source.schema` property.
1228
+
1229
+ Parameters
1230
+ ----------
1231
+ ep: otq.graph_components.EpBase,\
1232
+ otq.graph_components.EpBase.PinnedEp,\
1233
+ Tuple[otq.graph_components.EpBase, uuid.uuid4, Optional[str], Optional[str]]
1234
+ onetick.query EP object to append to source.
1235
+ out_pin: Optional[str], default=None
1236
+ name of the out pin to connect to ``ep``
1237
+ inplace: bool, default=True
1238
+ if `True` method will modify current object,
1239
+ otherwise it will return modified copy of the object.
1240
+
1241
+ Returns
1242
+ -------
1243
+ :class:`Source` or ``None``
1244
+ Returns ``None`` if ``inplace=True``.
1245
+
1246
+ See Also
1247
+ --------
1248
+ onetick.py.Source.schema
1249
+ onetick.py.core._source.schema.Schema
1250
+
1251
+ Examples
1252
+ --------
1253
+ Adding column 'B' directly with onetick.query EP.
1254
+
1255
+ >>> data = otp.Tick(A=1)
1256
+ >>> data.sink(otq.AddField(field='B', value=2)) # OTdirective: skip-snippet:;
1257
+ >>> otp.run(data) # OTdirective: skip-snippet:;
1258
+ Time A B
1259
+ 0 2003-12-01 1 2
1260
+
1261
+ But we can't use this column with `onetick.py` methods yet:
1262
+
1263
+ >>> data['C'] = data['B'] # OTdirective: skip-snippet:; # doctest: +ELLIPSIS
1264
+ Traceback (most recent call last):
1265
+ ...
1266
+ AttributeError: There is no 'B' column
1267
+
1268
+ We should manually change source's schema:
1269
+
1270
+ >>> data.schema.update(B=int) # OTdirective: skip-snippet:;
1271
+ >>> data['C'] = data['B']
1272
+ >>> otp.run(data)
1273
+ Time A B C
1274
+ 0 2003-12-01 1 2 2
1275
+
1276
+ Use parameter ``inplace=False`` to return modified copy of the source:
1277
+
1278
+ >>> data = otp.Tick(A=1)
1279
+ >>> new_data = data.sink(otq.AddField(field='B', value=2), inplace=False)
1280
+ >>> otp.run(data)
1281
+ Time A
1282
+ 0 2003-12-01 1
1283
+ >>> otp.run(new_data)
1284
+ Time A B
1285
+ 0 2003-12-01 1 2
1286
+ """
1287
+ if not (
1288
+ issubclass(type(ep), otq.graph_components.EpBase)
1289
+ or issubclass(type(ep), otq.graph_components.EpBase.PinnedEp)
1290
+ or isinstance(ep, tuple)
1291
+ ):
1292
+ raise TypeError("sinking is allowed only for EpBase instances")
1293
+
1294
+ if inplace:
1295
+ obj = self
1296
+ else:
1297
+ obj = self.copy()
1298
+
1299
+ if isinstance(ep, tuple):
1300
+ # for already existed EP fetched from _ProxyNode
1301
+ obj.__node.sink(out_pin, *ep)
1302
+ else:
1303
+ obj.__node.sink(out_pin, *obj.__from_ep_to_proxy(ep))
1304
+
1305
+ if inplace:
1306
+ return None
1307
+ return obj
1308
+
1309
+ def __rshift__(self, ep):
1310
+ """ duplicates the sink() method, but returns a new object """
1311
+ new_source = self.copy()
1312
+ new_source.sink(ep)
1313
+ return new_source
1314
+
1315
+ def __irshift__(self, ep):
1316
+ """ duplicates the sink() method, but assigns a new object to the source """
1317
+ new_source = self.copy()
1318
+ new_source.sink(ep)
1319
+ return new_source
1320
+
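# Sketch of the >> operator (illustration, not part of the package source): it sinks the EP
# into a copy, leaving the original source unchanged.
data = otp.Tick(A=1)
new_data = data >> otq.AddField(field='B', value=2)
new_data.schema.update(B=int)  # schema changes from raw EPs must still be declared manually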
1321
+ def source(self, ep, in_pin=None):
1322
+ """ Add node as source to root node """
1323
+ if not (
1324
+ issubclass(type(ep), otq.graph_components.EpBase)
1325
+ or issubclass(type(ep), otq.graph_components.EpBase.PinnedEp)
1326
+ or isinstance(ep, tuple)
1327
+ ):
1328
+ raise TypeError("sourcing is allowed only for EpBase instances")
1329
+
1330
+ if isinstance(ep, tuple):
1331
+ # for already existed EP fetched from _ProxyNode
1332
+ return self.__node.source(in_pin, *ep)
1333
+ else:
1334
+ return self.__node.source(in_pin, *self.__from_ep_to_proxy(ep))
1335
+
1336
+ def source_by_key(self, ep, to_key):
1337
+ """ Add node as source to graph node by key"""
1338
+ if not (
1339
+ issubclass(type(ep), otq.graph_components.EpBase)
1340
+ or issubclass(type(ep), otq.graph_components.EpBase.PinnedEp)
1341
+ or isinstance(ep, tuple)
1342
+ ):
1343
+ raise TypeError("sourcing is allowed only for EpBase instances")
1344
+
1345
+ if isinstance(ep, tuple):
1346
+ # for already existed EP fetched from _ProxyNode
1347
+ return self.__node.source_by_key(to_key, *ep)
1348
+ else:
1349
+ return self.__node.source_by_key(to_key, *self.__from_ep_to_proxy(ep))
1350
+
1351
+ def to_symbol_param(self):
1352
+ """
1353
+ Creates a read-only instance with the same columns except Time.
1354
+ It is used as the result of a first-stage query with symbol parameters.
1355
+
1356
+ See also
1357
+ --------
1358
+ :ref:`static/concepts/symbols:Symbol parameters`
1359
+
1360
+ Examples
1361
+ --------
1362
+ >>> symbols = otp.Ticks({'SYMBOL_NAME': ['S1', 'S2'], 'PARAM': ['A', 'B']})
1363
+ >>> symbol_params = symbols.to_symbol_param()
1364
+ >>> t = otp.DataSource('SOME_DB', tick_type='TT')
1365
+ >>> t['S_PARAM'] = symbol_params['PARAM']
1366
+ >>> result = otp.run(t, symbols=symbols)
1367
+ >>> result['S1']
1368
+ Time X S_PARAM
1369
+ 0 2003-12-01 00:00:00.000 1 A
1370
+ 1 2003-12-01 00:00:00.001 2 A
1371
+ 2 2003-12-01 00:00:00.002 3 A
1372
+ """
1373
+ return _SymbolParamSource(**self.columns())
1374
+
1375
+ @staticmethod
1376
+ def _convert_symbol_to_string(symbol, tmp_otq=None, start=None, end=None, timezone=None, symbol_date=None):
1377
+ if start is adaptive:
1378
+ start = None
1379
+ if end is adaptive:
1380
+ end = None
1381
+
1382
+ if isinstance(symbol, Source):
1383
+ symbol = otp.eval(symbol).to_eval_string(tmp_otq=tmp_otq,
1384
+ start=start, end=end, timezone=timezone,
1385
+ operation_suffix='symbol',
1386
+ query_name=None,
1387
+ file_suffix=symbol._name_suffix('symbol.otq'),
1388
+ symbol_date=symbol_date)
1389
+
1390
+ if isinstance(symbol, otp.query):
1391
+ return symbol.to_eval_string()
1392
+
1393
+ if isinstance(symbol, otq.GraphQuery):
1394
+ params = {'symbol_date': symbol_date} if symbol_date is not None else {}
1395
+ query_name = tmp_otq.add_query(symbol, suffix='__symbol', params=params)
1396
+ return f'eval(THIS::{query_name})'
1397
+
1398
+ return symbol
1399
+
1400
+ @staticmethod
1401
+ def _construct_multi_branch_graph(branches):
1402
+ # TODO: add various checks, e.g. that branches have common parts
1403
+ main = branches[0].copy()
1404
+ for branch in branches[1:]:
1405
+ main.node().add_rules(branch.node().copy_rules())
1406
+ main._merge_tmp_otq(branch)
1407
+ return main
1408
+
1409
+ def _apply_side_branches(self, side_branches):
1410
+ for side_branch in side_branches:
1411
+ self.node().add_rules(side_branch.node().copy_rules())
1412
+ self._merge_tmp_otq(side_branch)
1413
+ self.__sources_keys_dates.update(side_branch.__sources_keys_dates)
1414
+ self.__sources_modify_query_times.update(side_branch.__sources_modify_query_times)
1415
+ self.__sources_base_ep_func.update(side_branch.__sources_base_ep_func)
1416
+ self.__sources_symbols.update(side_branch.__sources_symbols)
1417
+
1418
+ @property
1419
+ def state_vars(self) -> StateVars:
1420
+ """
1421
+ Provides access to state variables
1422
+
1423
+ Returns
1424
+ -------
1425
+ State Variables: Dict[str, state variable]
1426
+ State variables; each one can be accessed by its name.
1427
+
1428
+ See Also
1429
+ --------
1430
+ | `State Variables \
1431
+ <../../static/getting_started/variables_and_data_structures.html#variables-and-data-structures>`_
1432
+ | **DECLARE_STATE_VARIABLES** OneTick event processor
1433
+
1434
+ """
1435
+ return self.__dict__['_state_vars']
1436
+
1437
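A short, hedged sketch of typical state variable access, based on the guide linked above (the running-sum logic is illustrative and the exact update semantics may differ):

>>> data = otp.Ticks(dict(A=[1, 2, 3]))
>>> data.state_vars['SUM'] = 0  # declare a state variable with its initial value
>>> data.state_vars['SUM'] += data['A']  # update it on every tick
>>> data['CUM_SUM'] = data.state_vars['SUM']  # expose the running value as a column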
+ # non-word characters are not supported in query names
1438
+ __invalid_query_name_symbols_regex = re.compile(r'\W')
1439
+
1440
+ def __remove_invalid_symbols(self, s):
1441
+ """
1442
+ Replaces characters that cannot be used in query names with '_'.
1443
+ """
1444
+ return self.__invalid_query_name_symbols_regex.sub('_', s)
1445
+
1446
+ def get_name(self, remove_invalid_symbols=False) -> Optional[str]:
1447
+ """
1448
+ Returns source name.
1449
+
1450
+ Parameters
1451
+ ----------
1452
+ remove_invalid_symbols: bool
1453
+ If True, all characters not supported in query names in the `.otq` file will be replaced,
1454
+ because only alphanumeric, minus and underscore characters are supported in query names.
1455
+
1456
+ See also
1457
+ --------
1458
+ :meth:`set_name`
1459
+ """
1460
+ if remove_invalid_symbols and self.__name:
1461
+ return self.__remove_invalid_symbols(self.__name)
1462
+ else:
1463
+ return self.__name
1464
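A hedged illustration of the remove_invalid_symbols flag, derived from the \W -> '_' substitution defined above rather than from a captured run:

>>> t = otp.Tick(A=1)
>>> t.set_name('my source!')
>>> t.get_name()
'my source!'
>>> t.get_name(remove_invalid_symbols=True)
'my_source_'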
+
1465
+ def set_name(self, new_name):
1466
+ """
1467
+ Sets source name.
1468
+ It's an internal onetick-py name of the source that is only used
1469
+ as a part of the resulting .otq file name and as the name of the query inside this file.
1470
+
1471
+ This method doesn't set the name of the OneTick graph node.
1472
+
1473
+ Parameters
1474
+ ----------
1475
+ new_name: str
1476
+ New name of the source.
1477
+
1478
+ Only alphanumeric, minus and underscore characters are supported.
1479
+ All other characters will be replaced in the resulting query name.
1480
+
1481
+ See also
1482
+ --------
1483
+ :meth:`get_name`
1484
+
1485
+ Examples
1486
+ --------
1487
+ >>> t = otp.Tick(A=1)
1488
+
1489
+ By default source has no name and some predefined values are used when generating .otq file:
1490
+
1491
+ >>> t.to_otq() # doctest: +SKIP
1492
+ '/tmp/test_user/run_20240126_152546_1391/magnificent-wolverine.to_otq.otq::query'
1493
+
1494
+ Changed name will be used as a part of the resulting .otq file name
1495
+ and as the name of the query inside this file:
1496
+
1497
+ >>> t.set_name('main')
1498
+ >>> t.to_otq() # doctest: +SKIP
1499
+ '/tmp/test_user/run_20240126_152546_1391/dandelion-angelfish.main.to_otq.otq::main'
1500
+ """
1501
+ assert isinstance(new_name, str) or new_name is None, "Source name must be a string or None."
1502
+ if new_name is not None:
1503
+ assert new_name != '', "Source name must be a non-empty string."
1504
+ self.__name = new_name
1505
+
1506
+ def _name_suffix(self, suffix, separator='.', remove_invalid_symbols=False):
1507
+ if remove_invalid_symbols:
1508
+ suffix = self.__remove_invalid_symbols(suffix)
1509
+ separator = self.__remove_invalid_symbols(separator)
1510
+ name = self.get_name(remove_invalid_symbols=True)
1511
+ else:
1512
+ name = self.__name
1513
+ return f'{separator}{name}{separator}{suffix}' if name else f'{separator}{suffix}'
1514
+
1515
+ @property
1516
+ def schema(self) -> Schema:
1517
+ """
1518
+ Represents the actual Python data schema in the column-name -> type format.
1519
+ For example, it can be used after :meth:`Source.sink` to adjust
1520
+ the schema.
1521
+
1522
+ Returns
1523
+ -------
1524
+ Schema
1525
+
1526
+ See Also
1527
+ --------
1528
+ Source.sink
1529
+
1530
+ Examples
1531
+ --------
1532
+
1533
+ >>> data = otp.Ticks([['X', 'Y', 'Z'],
1534
+ ... [ 1, 0.5, 'abc']])
1535
+ >>> data['T'] = data['Time']
1536
+ >>> data.schema
1537
+ {'X': <class 'int'>, 'Y': <class 'float'>, 'Z': <class 'str'>, 'T': <class 'onetick.py.types.nsectime'>}
1538
+
1539
+ >>> data.schema['X']
1540
+ <class 'int'>
1541
+
1542
+ >>> data.schema['X'] = float
1543
+ >>> data.schema['X']
1544
+ <class 'float'>
1545
+
1546
+ >>> 'W' in data.schema
1547
+ False
1548
+ >>> data.schema['W'] = otp.nsectime
1549
+ >>> 'W' in data.schema
1550
+ True
1551
+ >>> data.schema['W']
1552
+ <class 'onetick.py.types.nsectime'>
1553
+ """
1554
+ schema = self.columns(skip_meta_fields=True)
1555
+ # meta fields will be in the schema, but hidden
1556
+ hidden_columns = {
1557
+ k: v
1558
+ for k, v in self.columns(skip_meta_fields=False).items()
1559
+ if self._check_key_is_meta(k)
1560
+ }
1561
+ if 'TIMESTAMP' in hidden_columns:
1562
+ hidden_columns['Time'] = hidden_columns['TIMESTAMP']
1563
+ return Schema(_base_source=self, _hidden_columns=hidden_columns, **schema)
1564
+
1565
+ def set_schema(self, **kwargs):
1566
+ """
1567
+ Set schema of the source.
1568
+ Note: this method affects the Python part only and won't make any DB queries. It is used to set the schema after a DB read or a
1569
+ complex query.
1570
+
1571
+ .. deprecated:: 1.14.9
1572
+
1573
+ Please use the :property:`Source.schema` to access and adjust the schema.
1574
+
1575
+ Parameters
1576
+ ----------
1577
+ kwargs
1578
+ schema in the column_name=type format
1579
+
1580
+ Examples
1581
+ --------
1582
+ Python can't follow a low-level change of columns, e.g. when a complex query or per-tick script is sunk.
1583
+
1584
+ >>> data = otp.Ticks(dict(A=[1, 2], B=["a", "b"]))
1585
+ >>> data.sink(otq.AddField(field='Z', value='5'))
1586
+ >>> data.columns(skip_meta_fields=True)
1587
+ {'A': <class 'int'>, 'B': <class 'str'>}
1588
+ >>> # OTdirective: snippet-name: Arrange.schema.set;
1589
+ >>> data.set_schema(A=int, B=str, Z=int)
1590
+ >>> data.columns(skip_meta_fields=True)
1591
+ {'A': <class 'int'>, 'B': <class 'str'>, 'Z': <class 'int'>}
1592
+ """
1593
+ self.drop_columns()
1594
+ for name, dtype in kwargs.items():
1595
+ dtype = ott.get_source_base_type(dtype)
1596
+ if self._check_key_is_meta(name):
1597
+ warnings.warn(f"Setting type in schema for meta field {name}", stacklevel=2)
1598
+ if self._check_key_in_properties(name):
1599
+ raise ValueError(f"Can't set type in schema for class property {name}")
1600
+ self.__dict__[name] = _Column(name, dtype, self)
1601
+
1602
+ def has_start_end_time(self) -> Tuple[bool, bool]:
1603
+ """
1604
+ Check whether at least one of the query sources has a start time and whether at least one has an end time.
1605
+ """
1606
+ has_start_time = False
1607
+ has_end_time = False
1608
+
1609
+ for start, end in self._get_sources_dates().values():
1610
+ if not has_start_time and start is not adaptive and start is not None:
1611
+ has_start_time = True
1612
+
1613
+ if not has_end_time and end is not adaptive and end is not None:
1614
+ has_end_time = True
1615
+
1616
+ return has_start_time, has_end_time
1617
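A hedged sketch of the return shape (the output is skipped because it depends on how the source's time range was configured):

>>> otp.Tick(A=1).has_start_end_time()  # doctest: +SKIP
(False, False)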
+
1618
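The imports below are executed inside the class body, so each imported function is bound as an attribute of Source and behaves as an ordinary method. A generic, self-contained illustration of the pattern (the names are invented for this sketch; the real functions live under _source/source_methods/):

>>> def _double(self, x):  # module-level function written to accept `self`
...     return x * 2
>>> class _Demo:
...     double = _double  # `from some_module import _double` inside the class body has the same effect
>>> _Demo().double(3)
6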
+ from ._source.source_methods.aggregations import ( # type: ignore[misc]
1619
+ agg,
1620
+ high, low, first, last, distinct, high_time, low_time,
1621
+ ob_snapshot, ob_snapshot_wide, ob_snapshot_flat, ob_summary,
1622
+ ob_size, ob_vwap, ob_num_levels,
1623
+ ranking, percentile, find_value_for_percentile,
1624
+ exp_w_average, exp_tw_average, standardized_moment,
1625
+ portfolio_price, multi_portfolio_price, return_ep, implied_vol,
1626
+ linear_regression,
1627
+ process_by_group,
1628
+ )
1629
+ from ._source.source_methods.joins import ( # type: ignore[misc]
1630
+ _process_keep_time_param,
1631
+ _get_columns_with_prefix,
1632
+ join_with_collection,
1633
+ join_with_query,
1634
+ point_in_time,
1635
+ join_with_snapshot,
1636
+ )
1637
+ from ._source.source_methods.times import ( # type: ignore[misc]
1638
+ update_timestamp,
1639
+ modify_query_times,
1640
+ time_interval_shift,
1641
+ time_interval_change,
1642
+ )
1643
+ from ._source.source_methods.fields import ( # type: ignore[misc]
1644
+ _add_field,
1645
+ _update_timestamp,
1646
+ _update_field,
1647
+ __setattr__,
1648
+ __setitem__,
1649
+ add_fields,
1650
+ table,
1651
+ update,
1652
+ )
1653
+ from ._source.source_methods.filters import ( # type: ignore[misc]
1654
+ if_else,
1655
+ where_clause,
1656
+ where,
1657
+ _get_integer_slice,
1658
+ __getitem__,
1659
+ dropna,
1660
+ time_filter,
1661
+ skip_bad_tick,
1662
+ character_present,
1663
+ )
1664
+ from ._source.source_methods.drops import ( # type: ignore[misc]
1665
+ drop,
1666
+ __delitem__,
1667
+ )
1668
+ from ._source.source_methods.writes import ( # type: ignore[misc]
1669
+ write,
1670
+ write_parquet,
1671
+ save_snapshot,
1672
+ write_text,
1673
+ )
1674
+ from ._source.source_methods.renames import ( # type: ignore[misc]
1675
+ _add_prefix_and_suffix,
1676
+ add_prefix,
1677
+ add_suffix,
1678
+ rename,
1679
+ )
1680
+ from ._source.source_methods.pandases import ( # type: ignore[misc]
1681
+ plot,
1682
+ count,
1683
+ head,
1684
+ tail,
1685
+ )
1686
+ from ._source.source_methods.sorts import ( # type: ignore[misc]
1687
+ sort_values,
1688
+ sort,
1689
+ )
1690
+ from ._source.source_methods.debugs import ( # type: ignore[misc]
1691
+ dump,
1692
+ throw,
1693
+ logf,
1694
+ )
1695
+ from ._source.source_methods.applyers import ( # type: ignore[misc]
1696
+ apply_query,
1697
+ apply,
1698
+ script,
1699
+ )
1700
+ from ._source.source_methods.symbols import ( # type: ignore[misc]
1701
+ show_symbol_name_in_db,
1702
+ modify_symbol_name,
1703
+ )
1704
+ from ._source.source_methods.columns import ( # type: ignore[misc]
1705
+ mean,
1706
+ unite_columns,
1707
+ )
1708
+ from ._source.source_methods.switches import ( # type: ignore[misc]
1709
+ switch,
1710
+ split,
1711
+ )
1712
+ from ._source.source_methods.misc import ( # type: ignore[misc]
1713
+ pause,
1714
+ insert_tick,
1715
+ insert_at_end,
1716
+ transpose,
1717
+ cache,
1718
+ pnl_realized,
1719
+ execute,
1720
+ _columns_names_regex,
1721
+ fillna,
1722
+ mkt_activity,
1723
+ book_diff,
1724
+ limit,
1725
+ virtual_ob,
1726
+ corp_actions,
1727
+ )
1728
+ from ._source.source_methods.merges import ( # type: ignore[misc]
1729
+ __add__,
1730
+ append,
1731
+ diff,
1732
+ lee_and_ready,
1733
+ estimate_ts_delay,
1734
+ )
1735
+ from ._source.source_methods.data_quality import ( # type: ignore[misc]
1736
+ show_data_quality,
1737
+ insert_data_quality_event,
1738
+ intercept_data_quality,
1739
+ show_symbol_errors,
1740
+ intercept_symbol_errors,
1741
+ )
1742
+
1743
+
1744
+ _Source = Source # Backward compatibility