onetick-py 1.162.2__py3-none-any.whl → 1.164.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65):
  1. locator_parser/actions.py +10 -14
  2. locator_parser/common.py +13 -10
  3. locator_parser/io.py +6 -4
  4. locator_parser/locator.py +1 -1
  5. onetick/doc_utilities/ot_doctest.py +1 -1
  6. onetick/doc_utilities/snippets.py +1 -2
  7. onetick/lib/instance.py +7 -4
  8. onetick/py/__init__.py +5 -9
  9. onetick/py/_version.py +1 -1
  10. onetick/py/aggregations/_base.py +7 -4
  11. onetick/py/aggregations/_docs.py +22 -7
  12. onetick/py/aggregations/other.py +1 -1
  13. onetick/py/cache.py +1 -0
  14. onetick/py/callback/callback.py +1 -0
  15. onetick/py/core/_internal/_proxy_node.py +1 -1
  16. onetick/py/core/_internal/_state_objects.py +2 -2
  17. onetick/py/core/_source/source_methods/aggregations.py +8 -9
  18. onetick/py/core/_source/source_methods/applyers.py +2 -2
  19. onetick/py/core/_source/source_methods/debugs.py +16 -14
  20. onetick/py/core/_source/source_methods/drops.py +1 -1
  21. onetick/py/core/_source/source_methods/fields.py +5 -5
  22. onetick/py/core/_source/source_methods/filters.py +4 -3
  23. onetick/py/core/_source/source_methods/joins.py +6 -6
  24. onetick/py/core/_source/source_methods/misc.py +84 -0
  25. onetick/py/core/_source/source_methods/renames.py +3 -3
  26. onetick/py/core/_source/source_methods/switches.py +3 -3
  27. onetick/py/core/_source/source_methods/writes.py +279 -10
  28. onetick/py/core/_source/tmp_otq.py +1 -1
  29. onetick/py/core/column_operations/_methods/_internal.py +1 -1
  30. onetick/py/core/column_operations/_methods/methods.py +8 -7
  31. onetick/py/core/column_operations/_methods/op_types.py +1 -0
  32. onetick/py/core/column_operations/accessors/dt_accessor.py +4 -0
  33. onetick/py/core/column_operations/base.py +5 -5
  34. onetick/py/core/cut_builder.py +1 -0
  35. onetick/py/core/eval_query.py +1 -0
  36. onetick/py/core/lambda_object.py +2 -3
  37. onetick/py/core/per_tick_script.py +6 -5
  38. onetick/py/core/query_inspector.py +6 -7
  39. onetick/py/core/source.py +11 -8
  40. onetick/py/db/_inspection.py +4 -8
  41. onetick/py/db/db.py +4 -100
  42. onetick/py/docs/docstring_parser.py +1 -1
  43. onetick/py/functions.py +48 -11
  44. onetick/py/license.py +2 -0
  45. onetick/py/math.py +2 -2
  46. onetick/py/otq.py +1 -2
  47. onetick/py/run.py +8 -7
  48. onetick/py/servers.py +2 -2
  49. onetick/py/session.py +8 -6
  50. onetick/py/sources/common.py +6 -4
  51. onetick/py/sources/data_source.py +25 -35
  52. onetick/py/sources/query.py +7 -7
  53. onetick/py/sources/symbols.py +1 -1
  54. onetick/py/sources/ticks.py +3 -3
  55. onetick/py/state.py +1 -0
  56. onetick/py/types.py +27 -25
  57. onetick/py/utils/config.py +2 -2
  58. onetick/py/utils/perf.py +2 -3
  59. onetick/py/utils/temp.py +2 -2
  60. {onetick_py-1.162.2.dist-info → onetick_py-1.164.0.dist-info}/METADATA +1 -1
  61. {onetick_py-1.162.2.dist-info → onetick_py-1.164.0.dist-info}/RECORD +65 -65
  62. {onetick_py-1.162.2.dist-info → onetick_py-1.164.0.dist-info}/WHEEL +0 -0
  63. {onetick_py-1.162.2.dist-info → onetick_py-1.164.0.dist-info}/entry_points.txt +0 -0
  64. {onetick_py-1.162.2.dist-info → onetick_py-1.164.0.dist-info}/licenses/LICENSE +0 -0
  65. {onetick_py-1.162.2.dist-info → onetick_py-1.164.0.dist-info}/top_level.txt +0 -0
@@ -57,10 +57,14 @@ class _DtAccessor(_Accessor):
57
57
 
58
58
  %S - Seconds (2 digits)
59
59
 
60
+ %q - Milliseconds (3 digits)
61
+
60
62
  %J - Nanoseconds (9 digits)
61
63
 
62
64
  %p - AM/PM (2 characters)
63
65
 
66
+ %% - % character
67
+
64
68
  Examples
65
69
  --------
66
70
  >>> t = otp.Ticks(A=[otp.dt(2019, 1, 1, 1, 1, 1), otp.dt(2019, 2, 2, 2, 2, 2)])
@@ -880,6 +880,7 @@ class Operation:
880
880
  self._op_str = op_str
881
881
  self._dtype = dtype
882
882
  return op_str, dtype
883
+ return None
883
884
 
884
885
  def _convert_to(self, to_type):
885
886
  return _Operation(_methods.CONVERSIONS[self.dtype, to_type], [self])
@@ -889,16 +890,15 @@ class Operation:
889
890
  if dtype is bool:
890
891
  return self
891
892
  if are_ints_not_time(dtype):
892
- self = _Operation(_methods.ne, (self, 0))
893
+ return _Operation(_methods.ne, (self, 0))
893
894
  elif are_time(dtype):
894
- self = _Operation(_methods.ne, (self._convert_to(int), 0))
895
+ return _Operation(_methods.ne, (self._convert_to(int), 0))
895
896
  elif are_floats(dtype):
896
- self = _Operation(_methods.ne, (self, 0.0))
897
+ return _Operation(_methods.ne, (self, 0.0))
897
898
  elif are_strings(dtype):
898
- self = _Operation(_methods.ne, (self, ""))
899
+ return _Operation(_methods.ne, (self, ""))
899
900
  else:
900
901
  raise TypeError("Filter expression should return bool, int, float or string")
901
- return self
902
902
 
903
903
  def _replace_parameters(self, operation_cb, return_replace_tuples=False):
904
904
  """
@@ -102,6 +102,7 @@ class _BaseCutBuilder(metaclass=ABCMeta):
102
102
  return s
103
103
 
104
104
 
105
+ # pylint: disable-next=abstract-method
105
106
  class _CutBuilder(_BaseCutBuilder):
106
107
 
107
108
  def compute_bin_variables(self):
@@ -237,6 +237,7 @@ def prepare_params(**kwargs):
237
237
  converted_params = {}
238
238
  for key, value in kwargs.items():
239
239
  dtype = otp.types.get_object_type(value)
240
+ # pylint: disable-next=unidiomatic-typecheck
240
241
  if type(value) is str and len(value) > otp.string.DEFAULT_LENGTH:
241
242
  dtype = otp.string[len(value)]
242
243
  param = OnetickParameter(key, dtype)
@@ -253,7 +253,7 @@ class _EmulateObject(_EmulateInputObject):
253
253
 
254
254
  def __setattr__(self, key, value):
255
255
  if key in self.__class__.__dict__:
256
- return super().__setattr__(key, value)
256
+ super().__setattr__(key, value)
257
257
  if key not in self.__dict__ or key in _EmulateObject.NEW_VALUES:
258
258
  _EmulateObject.NEW_VALUES[key].append(value)
259
259
  if key not in self.__dict__:
@@ -381,8 +381,7 @@ class _EmulateObject(_EmulateInputObject):
381
381
 
382
382
  def _validate_lambda(lambda_f):
383
383
  if not (isinstance(lambda_f, types.LambdaType) and lambda_f.__name__ == '<lambda>'
384
- or isinstance(lambda_f, types.FunctionType)
385
- or isinstance(lambda_f, types.MethodType)):
384
+ or isinstance(lambda_f, (types.FunctionType, types.MethodType))):
386
385
  raise ValueError("It is expected to get a function, method or lambda,"
387
386
  f" but got '{lambda_f}' of type '{type(lambda_f)}'")
388
387
 
@@ -329,7 +329,7 @@ class Expression:
329
329
  @property
330
330
  def is_dynamic_tick(self) -> bool:
331
331
  try:
332
- return type(self.value) is _DynamicTick
332
+ return isinstance(self.value, _DynamicTick)
333
333
  except ValueError:
334
334
  return False
335
335
 
@@ -585,7 +585,7 @@ class ExpressionParser:
585
585
  value = self.fun.emulator if self.fun.emulator is not None else expr.id
586
586
  return Expression(value)
587
587
 
588
- if type(expr.ctx) is not ast.Load:
588
+ if not isinstance(expr.ctx, ast.Load):
589
589
  # local variable, left-hand side
590
590
  return Expression(LocalVariable(expr.id))
591
591
 
@@ -603,6 +603,7 @@ class ExpressionParser:
603
603
  # parameter of the per-tick script function
604
604
  return Expression(_Operation(op_str=expr.id, dtype=self.fun.args_annotations[expr.id]))
605
605
 
606
+ # pylint: disable-next=eval-used
606
607
  value = eval(expr.id, self.fun.closure_vars.globals, self.fun.closure_vars.nonlocals) # type: ignore[arg-type]
607
608
  return Expression(value)
608
609
 
@@ -632,7 +633,7 @@ class ExpressionParser:
632
633
  val = self.expression(expr.value)
633
634
  item = self.expression(expr.slice)
634
635
 
635
- if type(expr.ctx) is ast.Load:
636
+ if isinstance(expr.ctx, ast.Load):
636
637
  v = val.value[item.value]
637
638
  return Expression(v)
638
639
 
@@ -657,7 +658,7 @@ class ExpressionParser:
657
658
  val = self.expression(expr.value)
658
659
  attr = expr.attr
659
660
 
660
- if type(expr.ctx) is ast.Load:
661
+ if isinstance(expr.ctx, ast.Load):
661
662
  v = getattr(val.value, attr)
662
663
  return Expression(v)
663
664
 
@@ -1007,7 +1008,7 @@ class CaseExpressionParser(ExpressionParser):
1007
1008
  def get_if_expr(first, second):
1008
1009
  if isinstance(expr.op, ast.Or):
1009
1010
  return ast.IfExp(test=first, body=first, orelse=second)
1010
- if isinstance(expr.op, ast.And):
1011
+ else:
1011
1012
  return ast.IfExp(test=first, body=second, orelse=first)
1012
1013
 
1013
1014
  first = None
@@ -101,7 +101,7 @@ def get_query_info(otq_path, query_name="", inspected_graphs=None):
101
101
  stack_info = res.group(1)
102
102
  return ep_name, stack_info
103
103
 
104
- graph = defaultdict(lambda: Node())
104
+ graph = defaultdict(Node)
105
105
 
106
106
  if not os.path.exists(otq_path):
107
107
  raise FileNotFoundError(f'otq "{otq_path}" is not found')
@@ -176,7 +176,7 @@ def get_query_info(otq_path, query_name="", inspected_graphs=None):
176
176
  graph[num].PARAMETERS[inner_param_name] = inner_param_value
177
177
  continue
178
178
 
179
- if param == "SOURCE" or param == "SINK":
179
+ if param in ("SOURCE", "SINK"):
180
180
  sources = []
181
181
 
182
182
  for v in value.split():
@@ -199,8 +199,6 @@ def get_query_info(otq_path, query_name="", inspected_graphs=None):
199
199
  else:
200
200
  setattr(graph[num], param, value)
201
201
 
202
- # with
203
-
204
202
  if not found:
205
203
  raise QueryNotFoundError(f'Query "{query_name}" is not found in the {otq_path}')
206
204
 
@@ -233,6 +231,8 @@ def get_query_info(otq_path, query_name="", inspected_graphs=None):
233
231
  address = node.PARAMETERS["OTQ_NODE"]
234
232
  elif hasattr(node, "PH_PATH"):
235
233
  address = node.PH_PATH
234
+ else:
235
+ raise ValueError("Can't get NESTED_OTQ address")
236
236
 
237
237
  node.EP = "NESTED_OTQ " + address
238
238
 
@@ -263,7 +263,7 @@ def get_query_info(otq_path, query_name="", inspected_graphs=None):
263
263
 
264
264
  def _search_for_bound_sink(self, root, bound_symbol_info):
265
265
  if root in bound_symbol_info:
266
- return bound_symbol_info[root]
266
+ return
267
267
 
268
268
  if not is_commented_out(self.nodes[root]):
269
269
  # A commented node cannot have its own bound symbols, but can have a bound sink
@@ -419,7 +419,7 @@ def add_pins(otq_path, query_name, specification):
419
419
  if res:
420
420
  for inx, v in enumerate(specification):
421
421
  node, pin_flag, pin_name = v
422
- if node.NUM == num:
422
+ if node.NUM == num: # noqa
423
423
  if pin_flag is None:
424
424
  continue
425
425
  node_name = "NODE_" + str(num)
@@ -432,7 +432,6 @@ def add_pins(otq_path, query_name, specification):
432
432
 
433
433
  # don't use this specification item anymore
434
434
  specification[inx] = (node, None, None)
435
- # with
436
435
 
437
436
  with open(otq_path, "w") as fout:
438
437
  fout.write(out.getvalue())
onetick/py/core/source.py CHANGED
@@ -362,9 +362,9 @@ class Source:
362
362
  raise KeyError(f'Column name {name} is not in the schema. Please, check that this column '
363
363
  'is in the schema or add it using the .schema property')
364
364
 
365
- if name == 0 or name == 1:
365
+ if name in (0, 1):
366
366
  raise ValueError(f"constant {name} are not supported for indexing for now, please use otp.Empty")
367
- if type(name) in (int, float):
367
+ if isinstance(name, (int, float)):
368
368
  raise ValueError("integer indexes are not supported")
369
369
  self.__dict__[name] = _Column(name, dtype, self)
370
370
  else:
@@ -775,6 +775,7 @@ class Source:
775
775
  )
776
776
 
777
777
  return mqt_ep
778
+ return None
778
779
 
779
780
  def _set_date_range_and_symbols(self, symbols=None, start=None, end=None):
780
781
  # will modify self
@@ -1208,7 +1209,7 @@ class Source:
1208
1209
  if not (
1209
1210
  issubclass(type(ep), otq.graph_components.EpBase)
1210
1211
  or issubclass(type(ep), otq.graph_components.EpBase.PinnedEp)
1211
- or type(ep) is tuple
1212
+ or isinstance(ep, tuple)
1212
1213
  ):
1213
1214
  raise TypeError("sinking is allowed only for EpBase instances")
1214
1215
 
@@ -1217,7 +1218,7 @@ class Source:
1217
1218
  else:
1218
1219
  obj = self.copy()
1219
1220
 
1220
- if type(ep) is tuple:
1221
+ if isinstance(ep, tuple):
1221
1222
  # for already existed EP fetched from _ProxyNode
1222
1223
  obj.__node.sink(out_pin, *ep)
1223
1224
  else:
@@ -1244,11 +1245,11 @@ class Source:
1244
1245
  if not (
1245
1246
  issubclass(type(ep), otq.graph_components.EpBase)
1246
1247
  or issubclass(type(ep), otq.graph_components.EpBase.PinnedEp)
1247
- or type(ep) is tuple
1248
+ or isinstance(ep, tuple)
1248
1249
  ):
1249
1250
  raise TypeError("sourcing is allowed only for EpBase instances")
1250
1251
 
1251
- if type(ep) is tuple:
1252
+ if isinstance(ep, tuple):
1252
1253
  # for already existed EP fetched from _ProxyNode
1253
1254
  return self.__node.source(in_pin, *ep)
1254
1255
  else:
@@ -1259,11 +1260,11 @@ class Source:
1259
1260
  if not (
1260
1261
  issubclass(type(ep), otq.graph_components.EpBase)
1261
1262
  or issubclass(type(ep), otq.graph_components.EpBase.PinnedEp)
1262
- or type(ep) is tuple
1263
+ or isinstance(ep, tuple)
1263
1264
  ):
1264
1265
  raise TypeError("sourcing is allowed only for EpBase instances")
1265
1266
 
1266
- if type(ep) is tuple:
1267
+ if isinstance(ep, tuple):
1267
1268
  # for already existed EP fetched from _ProxyNode
1268
1269
  return self.__node.source_by_key(to_key, *ep)
1269
1270
  else:
@@ -1590,6 +1591,7 @@ class Source:
1590
1591
  write,
1591
1592
  write_parquet,
1592
1593
  save_snapshot,
1594
+ write_text,
1593
1595
  )
1594
1596
  from ._source.source_methods.renames import ( # type: ignore[misc]
1595
1597
  _add_prefix_and_suffix,
@@ -1632,6 +1634,7 @@ class Source:
1632
1634
  from ._source.source_methods.misc import ( # type: ignore[misc]
1633
1635
  pause,
1634
1636
  insert_tick,
1637
+ insert_at_end,
1635
1638
  transpose,
1636
1639
  cache,
1637
1640
  pnl_realized,
@@ -278,9 +278,8 @@ class DB:
278
278
  tz_gmt = gettz('GMT')
279
279
  for inx in range(len(result)):
280
280
  start_date = result['START_DATE'][inx]
281
- if start_date < 0:
282
- # On Windows datetime.fromtimestamp throws an OSError for negative values
283
- start_date = 0
281
+ # On Windows datetime.fromtimestamp throws an OSError for negative values
282
+ start_date = max(start_date, 0)
284
283
  start = datetime.fromtimestamp(start_date / 1000, tz=tz_gmt)
285
284
  start = start.replace(tzinfo=None)
286
285
  try:
@@ -376,8 +375,7 @@ class DB:
376
375
  # future is not loaded yet
377
376
  if locator_start > today:
378
377
  continue
379
- if locator_end > today:
380
- locator_end = today
378
+ locator_end = min(locator_end, today)
381
379
 
382
380
  if respect_acl:
383
381
  try:
@@ -627,7 +625,7 @@ class DB:
627
625
  context=self.context)
628
626
 
629
627
  result = get_schema(use_cache=True)
630
- if not len(result):
628
+ if result.empty:
631
629
  # in case cache settings in database are bad (e.g. BEXRTS-1220)
632
630
  result = get_schema(use_cache=False)
633
631
 
@@ -719,8 +717,6 @@ class DB:
719
717
  >>> us_comp_db.symbols(date=otp.dt(2022, 3, 1), tick_type='TRD', pattern='^AAP.*')
720
718
  ['AAP', 'AAPL']
721
719
  """
722
- import onetick.py as otp
723
-
724
720
  if date is None:
725
721
  date = self.last_date
726
722
  if timezone is None:
onetick/py/db/db.py CHANGED
@@ -182,7 +182,7 @@ class _DB:
182
182
  def _format_params(params):
183
183
  res = {}
184
184
  for key, value in params.items():
185
- if isinstance(value, dt.datetime):
185
+ if hasattr(value, 'strftime'):
186
186
  res[key] = value.strftime("%Y%m%d%H%M%S")
187
187
  else:
188
188
  res[key] = str(value)
@@ -280,6 +280,8 @@ class _DB:
280
280
  # because before there was no ability to get written ticks
281
281
  if kwargs.get('propagate'):
282
282
  return res
283
+ else:
284
+ return None
283
285
 
284
286
  @property
285
287
  def properties(self):
@@ -724,105 +726,6 @@ class DB(_DB):
724
726
  return self._format_params(feed)
725
727
 
726
728
 
727
- """
728
- Keep here as example of custom databases that can be defined in client code
729
-
730
-
731
- P_CME = DB(
732
- name="P_CME",
733
- db_properties={
734
- "symbology": "CTA",
735
- "archive_compression_type": constants.compression_type.NATIVE_PLUS_GZIP,
736
- "tick_search_max_boundary_offset_sec": 1800,
737
- "tick_timestamp_type": "NANOS",
738
- },
739
- db_locations=[
740
- {
741
- "access_method": constants.access_method.SOCKET,
742
- "location": "192.168.5.63:50025",
743
- "start_time": datetime(year=2008, month=9, day=1),
744
- "end_time": constants.DEFAULT_END_DATE,
745
- }
746
- ],
747
- )
748
-
749
- MS127 = DB(
750
- name="MS127",
751
- db_properties={
752
- "symbology": "MSGR",
753
- "ref_data_db": "REF_DATA_MS127",
754
- "archive_compression_type": constants.compression_type.NATIVE_PLUS_GZIP,
755
- "ignore_previous_day_corrections_on_reload": "yes",
756
- },
757
- db_locations=[
758
- {
759
- "access_method": constants.access_method.SOCKET,
760
- "location": "192.168.5.63:50025",
761
- "start_time": datetime(year=2010, month=12, day=31),
762
- "end_time": constants.DEFAULT_END_DATE,
763
- }
764
- ],
765
- )
766
-
767
- MS44 = DB(
768
- name="MS44",
769
- db_properties={
770
- "symbology": "MSGR",
771
- "ref_data_db": "REF_DATA_MS44",
772
- "archive_compression_type": constants.compression_type.NATIVE_PLUS_GZIP,
773
- "ignore_previous_day_corrections_on_reload": "yes",
774
- },
775
- db_locations=[
776
- {
777
- "access_method": constants.access_method.SOCKET,
778
- "location": "192.168.5.63:50025",
779
- "start_time": datetime(year=2010, month=12, day=31),
780
- "end_time": constants.DEFAULT_END_DATE,
781
- }
782
- ],
783
- )
784
-
785
- TAQ_NBBO = DB(
786
- name="TAQ_NBBO",
787
- db_properties={
788
- "symbology": "BZX",
789
- "price_not_key": True,
790
- "memory_data_max_life_hours": 30,
791
- "memory_db_dir": "/onetick-tickdata-com/STORAGE_GATEWAY/DEEP_HISTORY/US_TED/NBBO/shmem",
792
- "mmap_db_compression_type": constants.compression_type.NATIVE_PLUS_GZIP,
793
- },
794
- db_locations=[
795
- {
796
- "access_method": constants.access_method.FILE,
797
- "location": "/onetick-tickdata-com/STORAGE_GATEWAY/DEEP_HISTORY/US_TED/NBBO/",
798
- "start_time": datetime(year=2001, month=1, day=1),
799
- "end_time": constants.DEFAULT_END_DATE,
800
- }
801
- ],
802
- )
803
-
804
-
805
- US_COMP = DB(
806
- name="US_COMP",
807
- db_properties={
808
- "symbology": "BZX",
809
- "price_not_key": True,
810
- "memory_data_max_life_hours": 30,
811
- "memory_db_dir": "/onetick-tickdata-com/STORAGE_GATEWAY/DEEP_HISTORY/US_TED/TAQ/shmem",
812
- "mmap_db_compression_type": constants.compression_type.NATIVE_PLUS_GZIP,
813
- },
814
- db_locations=[
815
- {
816
- "access_method": constants.access_method.FILE,
817
- "location": "/onetick-tickdata-com/STORAGE_GATEWAY/DEEP_HISTORY/US_TED/TAQ/",
818
- "start_time": datetime(year=2003, month=10, day=1),
819
- "end_time": constants.DEFAULT_END_DATE,
820
- }
821
- ],
822
- )
823
- """
824
-
825
-
826
729
  class RefDB(DB):
827
730
  """ Creates reference database object.
828
731
 
@@ -1414,6 +1317,7 @@ class RefDB(DB):
1414
1317
  },
1415
1318
  stdout=subprocess.PIPE,
1416
1319
  stderr=subprocess.PIPE,
1320
+ check=False,
1417
1321
  )
1418
1322
  return p.stdout, p.stderr
1419
1323
 
@@ -8,7 +8,7 @@ from textwrap import dedent
8
8
  class Docstring:
9
9
 
10
10
  def __init__(self, doc: str):
11
- if type(doc) is list:
11
+ if isinstance(doc, list):
12
12
  doc = '\n'.join(doc)
13
13
  self.doc = doc
14
14
  self.indentation = ' ' * (len(doc.lstrip('\n')) - len(doc.lstrip()))
onetick/py/functions.py CHANGED
@@ -572,6 +572,7 @@ def _add_node_name_prefix_to_columns_in_operation(op, src):
572
572
  raise ValueError('You set to use name for column prefix, but name is empty')
573
573
  name = f'{src.node_name()}.{column.name}'
574
574
  return Column(name, column.dtype, column.obj_ref, precision=getattr(column, "_precision", None))
575
+ return None
575
576
 
576
577
  return op._replace_parameters(fun)
577
578
 
@@ -902,8 +903,18 @@ def join_by_time(sources, how="outer", on=None, policy=None, check_schema=True,
902
903
  0 2003-12-01 00:00:00.001 1 0
903
904
  1 2003-12-01 00:00:00.003 2 1
904
905
 
905
- In case you willing to add prefix/suffix to all columns in one of the sources you should use
906
- :func:`Source.add_prefix` or :func:`Source.add_suffix`
906
+ Note
907
+ ----
908
+ In case different ``sources`` have matching columns, the exception will be raised.
909
+
910
+ To fix this error,
911
+ functions :func:`Source.add_prefix` or :func:`Source.add_suffix` can be used to rename all columns in the source.
912
+
913
+ Note that resulting **TIMESTAMP** pseudo-column will be taken from the leading source,
914
+ and timestamps of ticks from non-leading sources will not be added to the output,
915
+ so if you need to save them, you need to copy the timestamp to some other column.
916
+
917
+ See examples below.
907
918
 
908
919
  Parameters
909
920
  ----------
@@ -1047,7 +1058,8 @@ def join_by_time(sources, how="outer", on=None, policy=None, check_schema=True,
1047
1058
 
1048
1059
  Default joining logic, outer join with the first source is the leader by default:
1049
1060
 
1050
- >>> otp.run(otp.join_by_time([d1, d2]))
1061
+ >>> data = otp.join_by_time([d1, d2])
1062
+ >>> otp.run(data)
1051
1063
  Time A B
1052
1064
  0 2003-12-01 00:00:00.001 1 0
1053
1065
  1 2003-12-01 00:00:00.002 2 1
@@ -1055,7 +1067,8 @@ def join_by_time(sources, how="outer", on=None, policy=None, check_schema=True,
1055
1067
 
1056
1068
  Leading source can be changed by using parameter ``leading``:
1057
1069
 
1058
- >>> otp.run(otp.join_by_time([d1, d2], leading=1))
1070
+ >>> data = otp.join_by_time([d1, d2], leading=1)
1071
+ >>> otp.run(data)
1059
1072
  Time A B
1060
1073
  0 2003-12-01 00:00:00.001 1 1
1061
1074
  1 2003-12-01 00:00:00.002 2 2
@@ -1064,7 +1077,8 @@ def join_by_time(sources, how="outer", on=None, policy=None, check_schema=True,
1064
1077
  Note that OneTick's logic is different depending on the order of sources specified,
1065
1078
  so specifying ``leading`` parameter in the previous example is not the same as changing the order of sources here:
1066
1079
 
1067
- >>> otp.run(otp.join_by_time([d2, d1], leading=0))
1080
+ >>> data = otp.join_by_time([d2, d1], leading=0)
1081
+ >>> otp.run(data)
1068
1082
  Time B A
1069
1083
  0 2003-12-01 00:00:00.001 1 0
1070
1084
  1 2003-12-01 00:00:00.002 2 1
@@ -1073,7 +1087,8 @@ def join_by_time(sources, how="outer", on=None, policy=None, check_schema=True,
1073
1087
  Parameter ``source_fields_order`` can be used to change the order of fields in the output,
1074
1088
  but it also affects the joining logic the same way as changing the order of sources:
1075
1089
 
1076
- >>> otp.run(otp.join_by_time([d1, d2], leading=1, source_fields_order=[1, 0]))
1090
+ >>> data = otp.join_by_time([d1, d2], leading=1, source_fields_order=[1, 0])
1091
+ >>> otp.run(data)
1077
1092
  Time B A
1078
1093
  0 2003-12-01 00:00:00.001 1 0
1079
1094
  1 2003-12-01 00:00:00.002 2 1
@@ -1082,26 +1097,48 @@ def join_by_time(sources, how="outer", on=None, policy=None, check_schema=True,
1082
1097
  Parameter ``how`` can be set to "inner".
1083
1098
  In this case only ticks that were successfully joined from all sources will be propagated:
1084
1099
 
1085
- >>> otp.run(otp.join_by_time([d1, d2], how='inner'))
1100
+ >>> data = otp.join_by_time([d1, d2], how='inner')
1101
+ >>> otp.run(data)
1086
1102
  Time A B
1087
1103
  0 2003-12-01 00:00:00.002 2 1
1088
1104
  1 2003-12-01 00:00:00.003 3 2
1089
1105
 
1090
1106
  Set parameter ``match_if_identical_times`` to only join ticks with the same timestamps:
1091
1107
 
1092
- >>> otp.run(otp.join_by_time([d1, d2], how='inner', match_if_identical_times=True))
1108
+ >>> data = otp.join_by_time([d1, d2], how='inner', match_if_identical_times=True)
1109
+ >>> otp.run(data)
1093
1110
  Time A B
1094
1111
  0 2003-12-01 00:00:00.001 1 1
1095
1112
  1 2003-12-01 00:00:00.002 2 2
1096
1113
 
1097
- Adding prefix to right source for all columns:
1114
+ In case of conflicting names in different sources, exception will be raised:
1115
+
1116
+ >>> d3 = otp.Ticks({'A': [1, 2, 4], 'offset': [1, 2, 4]})
1117
+ >>> data = otp.join_by_time([d1, d3])
1118
+ Traceback (most recent call last):
1119
+ ...
1120
+ ValueError: There are matched columns between sources: A
1121
+
1122
+ Adding prefix to right source for all columns will fix this problem:
1098
1123
 
1099
- >>> otp.run(otp.join_by_time([d1, d2.add_prefix('right_')]))
1100
- Time A right_B
1124
+ >>> data = otp.join_by_time([d1, d3.add_prefix('right_')])
1125
+ >>> otp.run(data)
1126
+ Time A right_A
1101
1127
  0 2003-12-01 00:00:00.001 1 0
1102
1128
  1 2003-12-01 00:00:00.002 2 1
1103
1129
  2 2003-12-01 00:00:00.003 3 2
1104
1130
 
1131
+ Note that timestamps from the non-leading source are not added to the output.
1132
+ You can add them manually in a different field:
1133
+
1134
+ >>> d3['D3_TIMESTAMP'] = d3['TIMESTAMP']
1135
+ >>> data = otp.join_by_time([d1, d3.add_prefix('right_')])
1136
+ >>> otp.run(data)
1137
+ Time A right_A right_D3_TIMESTAMP
1138
+ 0 2003-12-01 00:00:00.001 1 0 1969-12-31 19:00:00.000
1139
+ 1 2003-12-01 00:00:00.002 2 1 2003-12-01 00:00:00.001
1140
+ 2 2003-12-01 00:00:00.003 3 2 2003-12-01 00:00:00.002
1141
+
1105
1142
  Use parameter ``output_type_index`` to specify which input class to use to create output object.
1106
1143
  It may be useful in case some custom user class was used as input:
1107
1144
 
onetick/py/license.py CHANGED
@@ -71,6 +71,8 @@ class Custom(_LicenseBase):
71
71
  Custom license path
72
72
  """
73
73
 
74
+ # it is not useless
75
+ # pylint: disable=useless-parent-delegation
74
76
  def __init__(self, file, directory=None):
75
77
  """
76
78
  Parameters
onetick/py/math.py CHANGED
@@ -10,7 +10,7 @@ class _MaxOperator(_Operation):
10
10
  super().__init__(dtype=get_type_by_objects(objs))
11
11
 
12
12
  def _str_max(l_val, r_val):
13
- if type(r_val) is list:
13
+ if isinstance(r_val, list):
14
14
  if len(r_val) > 1:
15
15
  r_val = _str_max(r_val[0], r_val[1:])
16
16
  else:
@@ -55,7 +55,7 @@ class _MinOperator(_Operation):
55
55
  super().__init__(dtype=get_type_by_objects(objs))
56
56
 
57
57
  def _str_min(l_val, r_val):
58
- if type(r_val) is list:
58
+ if isinstance(r_val, list):
59
59
  if len(r_val) > 1:
60
60
  r_val = _str_min(r_val[0], r_val[1:])
61
61
  else:
onetick/py/otq.py CHANGED
@@ -10,7 +10,6 @@ import os
10
10
  import tempfile
11
11
  import warnings
12
12
  import onetick.py as otp
13
- from onetick.py import configuration
14
13
 
15
14
 
16
15
  class OneTickLib:
@@ -33,7 +32,7 @@ class OneTickLib:
33
32
  pass
34
33
 
35
34
 
36
- if os.getenv("OTP_SKIP_OTQ_VALIDATION", False):
35
+ if os.getenv("OTP_SKIP_OTQ_VALIDATION"):
37
36
  import onetick_stubs as otq # noqa: F401
38
37
  import pyomd # noqa: F401
39
38