onetick-py 1.177.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- locator_parser/__init__.py +0 -0
- locator_parser/acl.py +73 -0
- locator_parser/actions.py +262 -0
- locator_parser/common.py +368 -0
- locator_parser/io.py +43 -0
- locator_parser/locator.py +150 -0
- onetick/__init__.py +101 -0
- onetick/doc_utilities/__init__.py +3 -0
- onetick/doc_utilities/napoleon.py +40 -0
- onetick/doc_utilities/ot_doctest.py +140 -0
- onetick/doc_utilities/snippets.py +279 -0
- onetick/lib/__init__.py +4 -0
- onetick/lib/instance.py +141 -0
- onetick/py/__init__.py +293 -0
- onetick/py/_stack_info.py +89 -0
- onetick/py/_version.py +2 -0
- onetick/py/aggregations/__init__.py +11 -0
- onetick/py/aggregations/_base.py +648 -0
- onetick/py/aggregations/_docs.py +948 -0
- onetick/py/aggregations/compute.py +286 -0
- onetick/py/aggregations/functions.py +2216 -0
- onetick/py/aggregations/generic.py +104 -0
- onetick/py/aggregations/high_low.py +80 -0
- onetick/py/aggregations/num_distinct.py +83 -0
- onetick/py/aggregations/order_book.py +501 -0
- onetick/py/aggregations/other.py +1014 -0
- onetick/py/backports.py +26 -0
- onetick/py/cache.py +374 -0
- onetick/py/callback/__init__.py +5 -0
- onetick/py/callback/callback.py +276 -0
- onetick/py/callback/callbacks.py +131 -0
- onetick/py/compatibility.py +798 -0
- onetick/py/configuration.py +771 -0
- onetick/py/core/__init__.py +0 -0
- onetick/py/core/_csv_inspector.py +93 -0
- onetick/py/core/_internal/__init__.py +0 -0
- onetick/py/core/_internal/_manually_bound_value.py +6 -0
- onetick/py/core/_internal/_nodes_history.py +250 -0
- onetick/py/core/_internal/_op_utils/__init__.py +0 -0
- onetick/py/core/_internal/_op_utils/every_operand.py +9 -0
- onetick/py/core/_internal/_op_utils/is_const.py +10 -0
- onetick/py/core/_internal/_per_tick_scripts/tick_list_sort_template.script +121 -0
- onetick/py/core/_internal/_proxy_node.py +140 -0
- onetick/py/core/_internal/_state_objects.py +2312 -0
- onetick/py/core/_internal/_state_vars.py +93 -0
- onetick/py/core/_source/__init__.py +0 -0
- onetick/py/core/_source/_symbol_param.py +95 -0
- onetick/py/core/_source/schema.py +97 -0
- onetick/py/core/_source/source_methods/__init__.py +0 -0
- onetick/py/core/_source/source_methods/aggregations.py +809 -0
- onetick/py/core/_source/source_methods/applyers.py +296 -0
- onetick/py/core/_source/source_methods/columns.py +141 -0
- onetick/py/core/_source/source_methods/data_quality.py +301 -0
- onetick/py/core/_source/source_methods/debugs.py +272 -0
- onetick/py/core/_source/source_methods/drops.py +120 -0
- onetick/py/core/_source/source_methods/fields.py +619 -0
- onetick/py/core/_source/source_methods/filters.py +1002 -0
- onetick/py/core/_source/source_methods/joins.py +1413 -0
- onetick/py/core/_source/source_methods/merges.py +605 -0
- onetick/py/core/_source/source_methods/misc.py +1455 -0
- onetick/py/core/_source/source_methods/pandases.py +155 -0
- onetick/py/core/_source/source_methods/renames.py +356 -0
- onetick/py/core/_source/source_methods/sorts.py +183 -0
- onetick/py/core/_source/source_methods/switches.py +142 -0
- onetick/py/core/_source/source_methods/symbols.py +117 -0
- onetick/py/core/_source/source_methods/times.py +627 -0
- onetick/py/core/_source/source_methods/writes.py +986 -0
- onetick/py/core/_source/symbol.py +205 -0
- onetick/py/core/_source/tmp_otq.py +222 -0
- onetick/py/core/column.py +209 -0
- onetick/py/core/column_operations/__init__.py +0 -0
- onetick/py/core/column_operations/_methods/__init__.py +4 -0
- onetick/py/core/column_operations/_methods/_internal.py +28 -0
- onetick/py/core/column_operations/_methods/conversions.py +216 -0
- onetick/py/core/column_operations/_methods/methods.py +292 -0
- onetick/py/core/column_operations/_methods/op_types.py +160 -0
- onetick/py/core/column_operations/accessors/__init__.py +0 -0
- onetick/py/core/column_operations/accessors/_accessor.py +28 -0
- onetick/py/core/column_operations/accessors/decimal_accessor.py +104 -0
- onetick/py/core/column_operations/accessors/dt_accessor.py +537 -0
- onetick/py/core/column_operations/accessors/float_accessor.py +184 -0
- onetick/py/core/column_operations/accessors/str_accessor.py +1367 -0
- onetick/py/core/column_operations/base.py +1121 -0
- onetick/py/core/cut_builder.py +150 -0
- onetick/py/core/db_constants.py +20 -0
- onetick/py/core/eval_query.py +245 -0
- onetick/py/core/lambda_object.py +441 -0
- onetick/py/core/multi_output_source.py +232 -0
- onetick/py/core/per_tick_script.py +2256 -0
- onetick/py/core/query_inspector.py +464 -0
- onetick/py/core/source.py +1744 -0
- onetick/py/db/__init__.py +2 -0
- onetick/py/db/_inspection.py +1128 -0
- onetick/py/db/db.py +1327 -0
- onetick/py/db/utils.py +64 -0
- onetick/py/docs/__init__.py +0 -0
- onetick/py/docs/docstring_parser.py +112 -0
- onetick/py/docs/utils.py +81 -0
- onetick/py/functions.py +2398 -0
- onetick/py/license.py +190 -0
- onetick/py/log.py +88 -0
- onetick/py/math.py +935 -0
- onetick/py/misc.py +470 -0
- onetick/py/oqd/__init__.py +22 -0
- onetick/py/oqd/eps.py +1195 -0
- onetick/py/oqd/sources.py +325 -0
- onetick/py/otq.py +216 -0
- onetick/py/pyomd_mock.py +47 -0
- onetick/py/run.py +916 -0
- onetick/py/servers.py +173 -0
- onetick/py/session.py +1347 -0
- onetick/py/sources/__init__.py +19 -0
- onetick/py/sources/cache.py +167 -0
- onetick/py/sources/common.py +128 -0
- onetick/py/sources/csv.py +642 -0
- onetick/py/sources/custom.py +85 -0
- onetick/py/sources/data_file.py +305 -0
- onetick/py/sources/data_source.py +1045 -0
- onetick/py/sources/empty.py +94 -0
- onetick/py/sources/odbc.py +337 -0
- onetick/py/sources/order_book.py +271 -0
- onetick/py/sources/parquet.py +168 -0
- onetick/py/sources/pit.py +191 -0
- onetick/py/sources/query.py +495 -0
- onetick/py/sources/snapshots.py +419 -0
- onetick/py/sources/split_query_output_by_symbol.py +198 -0
- onetick/py/sources/symbology_mapping.py +123 -0
- onetick/py/sources/symbols.py +374 -0
- onetick/py/sources/ticks.py +825 -0
- onetick/py/sql.py +70 -0
- onetick/py/state.py +251 -0
- onetick/py/types.py +2131 -0
- onetick/py/utils/__init__.py +70 -0
- onetick/py/utils/acl.py +93 -0
- onetick/py/utils/config.py +186 -0
- onetick/py/utils/default.py +49 -0
- onetick/py/utils/file.py +38 -0
- onetick/py/utils/helpers.py +76 -0
- onetick/py/utils/locator.py +94 -0
- onetick/py/utils/perf.py +498 -0
- onetick/py/utils/query.py +49 -0
- onetick/py/utils/render.py +1374 -0
- onetick/py/utils/script.py +244 -0
- onetick/py/utils/temp.py +471 -0
- onetick/py/utils/types.py +120 -0
- onetick/py/utils/tz.py +84 -0
- onetick_py-1.177.0.dist-info/METADATA +137 -0
- onetick_py-1.177.0.dist-info/RECORD +152 -0
- onetick_py-1.177.0.dist-info/WHEEL +5 -0
- onetick_py-1.177.0.dist-info/entry_points.txt +2 -0
- onetick_py-1.177.0.dist-info/licenses/LICENSE +21 -0
- onetick_py-1.177.0.dist-info/top_level.txt +2 -0
|
File without changes
|
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
import pandas as pd
|
|
2
|
+
import numpy as np
|
|
3
|
+
|
|
4
|
+
from .. import types as ott
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def _convert_pandas_types(dtype):
|
|
8
|
+
if pd.api.types.is_integer_dtype(dtype):
|
|
9
|
+
return int
|
|
10
|
+
elif pd.api.types.is_float_dtype(dtype):
|
|
11
|
+
return float
|
|
12
|
+
elif pd.api.types.is_string_dtype(dtype):
|
|
13
|
+
return str
|
|
14
|
+
elif pd.api.types.is_bool_dtype(dtype):
|
|
15
|
+
return bool
|
|
16
|
+
return None
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def inspect_by_pandas(path_to_csv, first_line_is_title=True, names=None, field_delimiter=',', quote_char='"'):
    """Inspect a CSV file with pandas and derive the OneTick column schema.

    Parameters
    ----------
    path_to_csv:
        Path to the CSV file to inspect.
    first_line_is_title: bool
        If True, the first line of the file is treated as a header.
    names: list of str, optional
        Explicit column names; when given together with a header line,
        the header line is consumed (``header=0``) and these names are used.
    field_delimiter: str
        Field separator used in the file.
    quote_char: str
        Quoting character used in the file.

    Returns
    -------
    tuple
        ``(columns, default_types, forced_title)`` where ``columns`` maps
        column name to python type (or None when undetermined),
        ``default_types`` maps columns declared in the header as
        "type COLUMNNAME" to their declared type (only kept when the header
        starts with ``#``), and ``forced_title`` tells whether the first line
        starts with ``#`` (OneTick then always treats it as a header).

    Raises
    ------
    ValueError
        If the first data line starts with ``#`` while
        ``first_line_is_title=False``, because OneTick would forcefully
        interpret that line as a header.
    """
    # read CHUNK_SIZE first lines to determine column types
    CHUNK_SIZE = 300

    header = None

    if first_line_is_title:
        header = 0 if names else "infer"

    with pd.read_csv(
        path_to_csv,
        engine="python",
        iterator=True,
        header=header,
        names=names,
        sep=field_delimiter,
        escapechar='\\',
        quotechar=quote_char,
    ) as reader:
        df = reader.read(CHUNK_SIZE)

    # without a header and without explicit names pandas numbers the columns;
    # prefix them so OneTick gets valid identifiers
    prefix = None if first_line_is_title or names else "COLUMN_"
    if prefix:
        df.columns = [f'{prefix}{col}' for col in df.columns]

    if not first_line_is_title:
        first_column = "COLUMN_0"
        if names:
            first_column = names[0]
        if len(df) > 0 and len(df.columns) > 0 and df.dtypes[first_column] == np.dtype("O"):
            # .iloc[0] instead of chained [0]: positional access is correct
            # regardless of the index and avoids deprecated chained indexing
            if df[first_column].iloc[0].startswith("#"):
                raise ValueError(
                    "If first line of CSV starts with #, you must set first_line_is_title=True, "
                    "because OneTick will forcefully use first line as header.")

    # CSV_FILE_LISTING will ignore FIRST_LINE_IS_TITLE, if first line starts with hash sign #
    forced_title = df.columns.values[0][0] == '#'
    if forced_title:
        # remove hash sign from first column name
        df.rename(columns={df.columns.values[0]: df.columns.values[0][1:]}, inplace=True)

    # check for default types in OneTick format ("columnname type")
    default_types = {}
    rename = {}

    for column in df.columns.values:
        c = column.split()

        if column.startswith('Unnamed: '):
            # OneTick doesn't allow to have empty column name, so we explicitly set it here
            # and in CSV_FILE_LISTING EP later in CSV.base_ep()
            rename[column] = "COLUMN_" + column[9:]

        elif len(c) == 2:
            # format: "type COLUMNNAME"
            # http://solutions.pages.soltest.onetick.com/iac/onetick-server/ep_guide/EP/FieldTypeDeclarations.htm#supported_field_types
            dtype = ott.str2type(c[0])
            if dtype is not None:
                default_types[c[1]] = dtype
                rename[column] = c[1]

    if rename:
        df.rename(columns=rename, inplace=True)

    # convert pandas types to otp
    columns = {name: _convert_pandas_types(dt) for name, dt in df.dtypes.items()}

    # explicitly set types for columns having format "type COLUMNNAME"
    columns.update(default_types)

    # reset default_types if # is not in first line
    if not forced_title:
        default_types = {}

    return columns, default_types, forced_title
|
|
File without changes
|
|
@@ -0,0 +1,250 @@
|
|
|
1
|
+
from abc import ABC, abstractmethod
|
|
2
|
+
from typing import List
|
|
3
|
+
from copy import deepcopy
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class _NodeRule(ABC):
|
|
7
|
+
key_params: List[str] = []
|
|
8
|
+
|
|
9
|
+
@abstractmethod
|
|
10
|
+
def build(self, eps, print_out=False):
|
|
11
|
+
raise NotImplementedError
|
|
12
|
+
|
|
13
|
+
def __repr__(self):
|
|
14
|
+
return f'{self.__class__.__name__}({id(self)})'
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class _Assign(_NodeRule):
    """Rule that materializes the node for ``key`` from a template ep."""

    key_params = ['key']

    def __init__(self, ep, key):
        self.ep = ep
        self.key = key

    def build(self, eps, print_out=False):
        # create the node lazily: only the first rule touching this key copies the template
        if self.key not in eps:
            eps[self.key] = self.ep.copy()
        node = eps[self.key]

        if print_out:
            print(f'[API] Create "{str(node).strip()}" (id={id(node)})')

        return node
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
class _TickType(_NodeRule):
    """Rule that assigns tick type ``tt`` to the node stored under ``key``."""

    key_params = ['key']

    def __init__(self, ep, key, tt):
        self.ep = ep
        self.key = key
        self.tt = tt

    def build(self, eps, print_out=False):
        if self.key not in eps:
            eps[self.key] = self.ep.copy()
        node = eps[self.key]

        if print_out:
            print(f'[API] Set tick type "{self.tt}" for "{str(node).strip()}" (id={id(node)})')

        node.tick_type(self.tt)
        return node
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
class _Symbol(_NodeRule):
    """Rule that assigns a symbol to the node stored under ``key``."""

    key_params = ['key']

    def __init__(self, ep, key, symbol):
        self.ep = ep
        self.key = key
        self.symbol = symbol

    def build(self, eps, print_out=False):
        if self.key not in eps:
            eps[self.key] = self.ep.copy()
        node = eps[self.key]

        if print_out:
            print(f'[API] Set symbol "{self.symbol}" for "{str(node).strip()}" (id={id(node)})')

        node.set_symbol(self.symbol)
        return node
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
class _NodeName(_NodeRule):
    """Rule that sets the display name of the node stored under ``key``.

    Unlike the other rules, this one never creates the node: the node must
    already exist in ``eps`` when the rule is replayed.
    """

    key_params = ['key']

    def __init__(self, key, name):
        self.key = key
        self.name = name

    def build(self, eps, print_out=False):
        node = eps[self.key]
        node.node_name(self.name)

        if print_out:
            print(f'[API] Set name "{self.name}" for "{str(node).strip()}" (id={id(node)})')

        return node
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
class _Sink(_NodeRule):
    """Rule that connects parent ``p_ep[p_out_pin]`` -> child ``ep[in_pin]``."""

    key_params = ['p_key', 'key']

    def __init__(self, p_ep, p_key, p_out_pin, ep, key, in_pin):
        self.p_ep = p_ep
        self.p_key = p_key
        self.p_out_pin = p_out_pin
        self.ep = ep
        self.key = key
        self.in_pin = in_pin

    def build(self, eps, print_out=False):
        # materialize both endpoints on first use
        for k, template in ((self.p_key, self.p_ep), (self.key, self.ep)):
            if k not in eps:
                eps[k] = template.copy()

        # apply the pins: [] selects an output pin, () selects an input pin
        parent = eps[self.p_key][self.p_out_pin] if self.p_out_pin else eps[self.p_key]
        child = eps[self.key](self.in_pin) if self.in_pin else eps[self.key]

        if print_out:
            print(f'[API] Connect "{str(parent).strip()}"[out_pin={self.p_out_pin}] -> '
                  f'"{str(child).strip()}"[self.in_pin={self.in_pin}] (id={id(parent)} and id={id(child)})')

        parent.sink(child)
        return child
|
|
119
|
+
|
|
120
|
+
|
|
121
|
+
class _Source(_NodeRule):
    """Rule that connects ``ep[out_pin]`` as a source into ``p_ep[p_in_pin]``."""

    key_params = ['p_key', 'key']

    def __init__(self, p_ep, p_key, p_in_pin, ep, key, out_pin):
        self.p_ep = p_ep
        self.p_key = p_key
        self.p_in_pin = p_in_pin
        self.ep = ep
        self.key = key
        self.out_pin = out_pin

    def build(self, eps, print_out=False):
        # materialize both endpoints on first use
        for k, template in ((self.p_key, self.p_ep), (self.key, self.ep)):
            if k not in eps:
                eps[k] = template.copy()

        # apply the pins: () selects an input pin, [] selects an output pin
        parent = eps[self.p_key](self.p_in_pin) if self.p_in_pin else eps[self.p_key]
        child = eps[self.key][self.out_pin] if self.out_pin else eps[self.key]

        if print_out:
            print(f'[API] Connect "{str(child).strip()}"[out_pin={self.out_pin}] -> '
                  f'"{str(parent).strip()}"[in_pin={self.p_in_pin}] (id={id(child)} and id={id(parent)})')

        parent.source(child)
        return parent
|
|
152
|
+
|
|
153
|
+
|
|
154
|
+
class _SourceByKey(_NodeRule):
    """Rule that connects ``ep[out_pin]`` as a source into the node already
    stored under ``p_key`` (the parent is referenced by key only, never created)."""

    key_params = ['p_key', 'key']

    def __init__(self, p_key, ep, key, out_pin):
        self.p_key = p_key
        self.ep = ep
        self.key = key
        self.out_pin = out_pin

    def build(self, eps, print_out=False):
        if self.key not in eps:
            eps[self.key] = self.ep.copy()

        parent = eps[self.p_key]
        child = eps[self.key][self.out_pin] if self.out_pin else eps[self.key]

        if print_out:
            print(f'[API] Connect "{str(child).strip()}" -> "{str(parent).strip()}" (id={id(child)} and id={id(parent)})')

        parent.source(child)
        return parent
|
|
178
|
+
|
|
179
|
+
|
|
180
|
+
class _NodesHistory:
    """Ordered log of graph-construction rules.

    Every structural change made through ``_ProxyNode`` is appended here as a
    rule object; replaying the log with :meth:`build` reconstructs the node
    graph from scratch into an ``eps`` mapping keyed by node uuid.
    """

    def __init__(self):
        self._rules: List[_NodeRule] = []

    def assign(self, ep, key):
        self._rules.append(_Assign(ep, key))

    def tick_type(self, ep, key, tt):
        self._rules.append(_TickType(ep, key, tt))

    def symbol(self, ep, key, symbol):
        self._rules.append(_Symbol(ep, key, symbol))

    def node_name(self, key, name):
        self._rules.append(_NodeName(key, name))

    def sink(self, p_ep, p_key, p_out_pin, ep, key, in_pin):
        self._rules.append(_Sink(p_ep, p_key, p_out_pin, ep, key, in_pin))

    def source(self, p_ep, p_key, p_in_pin, ep, key, out_pin):
        self._rules.append(_Source(p_ep, p_key, p_in_pin, ep, key, out_pin))

    def source_by_key(self, p_key, ep, key, out_pin):
        self._rules.append(_SourceByKey(p_key, ep, key, out_pin))

    def rebuild(self, keys: dict):
        """Rebuild history, change uuid for each node"""
        seen = set()
        for rule in self._rules:
            rule_id = id(rule)
            if rule_id in seen:
                # this can happen when we are merging the branch of the source with this source.
                # In this case some rules in the source and the branch will be the same.
                # (e.g. in dump() function)
                continue
            seen.add(rule_id)
            # remap every uuid attribute declared by the rule
            for key_param in rule.key_params:
                setattr(rule, key_param, keys[getattr(rule, key_param)])

    def build(self, eps, root_key, print_out=False):
        """Replay all rules into ``eps`` and return the node stored under ``root_key``."""
        if print_out:
            print("")
            print("[[API OUTPUT STARTS]]")
            print('number of rules:', len(self._rules))

        for rule in self._rules:
            rule.build(eps, print_out=print_out)

        if print_out:
            print("[[API OUTPUT ENDS]]")

        res = eps[root_key]

        assert res is not None

        return res

    def copy(self, deep=False):
        """Return the rule list — shallow by default, deep-copied on request."""
        return deepcopy(self._rules) if deep else list(self._rules)

    def add(self, other_rules):
        """Append rules from another history, skipping ones already present."""
        seen = {id(rule) for rule in self._rules}
        for rule in other_rules:
            if id(rule) in seen:
                # this can happen when we are merging the branch of the source with this source.
                # In this case some rules in the source and the branch will be the same.
                # (e.g. in dump() function)
                continue
            seen.add(id(rule))
            self._rules.append(rule)
|
|
File without changes
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
from onetick.py.core._internal._op_utils.every_operand import every_operand
|
|
2
|
+
from onetick.py.core._source._symbol_param import _SymbolParamColumn
|
|
3
|
+
from onetick.py.core.column import _Column
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def is_const(operation):
    """Return True when *operation* does not reference any data column.

    Symbol-parameter columns are not data columns, so an expression built
    only from constants and symbol parameters counts as constant.
    """
    return not any(
        isinstance(op, _Column) and not isinstance(op, _SymbolParamColumn)
        for op in every_operand(operation)
    )
|
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
/* this file is a python template string that, upon substitution, produces a per tick script */
/*
   Template placeholders substituted from Python:
     ${tick_list_var}         - the TICK_LIST state variable sorted in place
     ${field_access_function} - typed getter called with the field name; its result is compared with <=
     ${field_name}            - name of the field to sort by
*/

long main()
{
    /* merge sort implementation for tick lists */
    /*
    We iterate over powers of 2: N=1, 2, 4, 8, etc.
    For each N we do the following:
    - take a new empty list
    - iteratively take two consecutive sequences of size N from the initial list
    and merge them (with ordering) into the new list
    - sequences taken at step i will have items:
    - for sequence 1, [2N(i-1), 2N(i-1) + N)
    - for sequence 2, [2N(i-1) + N, 2Ni)
    - note that both sequences may turn out incomplete or even empty, that's fine
    - do this until the whole input list was split into those consecutive sequences and merged into the new list
    - copy new list into the old one and clean the new list
    We do this until N is greater than the size of the initial list, at which point we will have a sorted list
    */
    static TICK_LIST LOCAL::TARGET_TICKS;
    long LOCAL::I = 0;
    /* 2^K is the length of the sub-lists that we're going to merge on each iteration */
    long LOCAL::K = 0;
    long LOCAL::N = 0;
    static TICK_LIST_TICK LOCAL::SUB_LIST_1;
    static TICK_LIST_TICK LOCAL::SUB_LIST_2;
    long LOCAL::SUB_LIST_1_CNT = 0;
    long LOCAL::SUB_LIST_2_CNT = 0;
    long LOCAL::SUB_LIST_1_ENDED = 0;
    long LOCAL::SUB_LIST_2_ENDED = 0;
    /*
    due to implementation, it won't hurt to do an extra iteration just to be sure that log calculation rounding
    did not lead to any funny things
    */
    long LOCAL::K_MAX = LOG(${tick_list_var}.GET_SIZE())/LOG(2) + 1;
    for ( LOCAL::K=0; LOCAL::K <= LOCAL::K_MAX; LOCAL::K+=1 )
    {
        /* this is the main loop, here we select sub-lists and merge them */
        LOCAL::N = POWER(2,LOCAL::K);
        /* first, we set first iterator to the beginning */
        ${tick_list_var}.ITERATOR(LOCAL::SUB_LIST_1);
        /* then we continue setting the second iterator to the required amount of ticks past that */
        while(true)
        {
            /* SUB_LIST_2 starts N ticks past SUB_LIST_1 (NEXT past the end is tolerated) */
            LOCAL::SUB_LIST_1.COPY(LOCAL::SUB_LIST_2);
            for (LOCAL::I=0; LOCAL::I < LOCAL::N; LOCAL::I+=1)
            {
                LOCAL::SUB_LIST_2.NEXT();
            }
            /*
            now, we start from SUB_LIST_1 and SUB_LIST_2 and iterate over both
            and merge them with proper order into the LOCAL::TARGET_TICKS list
            while doing this, we take care to check if any of those lists already ended
            */
            LOCAL::SUB_LIST_1_CNT = 0;
            LOCAL::SUB_LIST_2_CNT = 0;
            LOCAL::SUB_LIST_1_ENDED = 0;
            LOCAL::SUB_LIST_2_ENDED = 0;
            while(true)
            {
                /* first, setting flags for end of sub-lists */
                /* NOTE(review): "LOCAL." below differs from the "LOCAL::" spelling used
                   everywhere else in this file - confirm both are accepted by the parser */
                if ((LOCAL::SUB_LIST_1_CNT >= LOCAL::N) or (LOCAL.SUB_LIST_1.IS_END() == 1))
                {
                    LOCAL::SUB_LIST_1_ENDED = 1;
                }
                if ((LOCAL::SUB_LIST_2_CNT >= LOCAL::N) or (LOCAL.SUB_LIST_2.IS_END() == 1))
                {
                    LOCAL::SUB_LIST_2_ENDED = 1;
                }
                /* now taking next element, or breaking, depending on the flags */
                if ((LOCAL::SUB_LIST_1_ENDED == 0) and (LOCAL::SUB_LIST_2_ENDED == 0))
                {
                    /* <= is important for the stability of the sort */
                    if (LOCAL::SUB_LIST_1.${field_access_function}("${field_name}") <= LOCAL::SUB_LIST_2.${field_access_function}("${field_name}"))
                    {
                        LOCAL::TARGET_TICKS.PUSH_BACK(LOCAL::SUB_LIST_1);
                        LOCAL::SUB_LIST_1_CNT += 1;
                        LOCAL::SUB_LIST_1.NEXT();
                    }
                    else
                    {
                        LOCAL::TARGET_TICKS.PUSH_BACK(LOCAL::SUB_LIST_2);
                        LOCAL::SUB_LIST_2_CNT += 1;
                        LOCAL::SUB_LIST_2.NEXT();
                    }
                    continue;
                }
                /* only one of the sub-lists is still live: drain it */
                if (LOCAL::SUB_LIST_1_ENDED == 0)
                {
                    LOCAL::TARGET_TICKS.PUSH_BACK(LOCAL::SUB_LIST_1);
                    LOCAL::SUB_LIST_1_CNT += 1;
                    LOCAL::SUB_LIST_1.NEXT();
                    continue;
                }
                if (LOCAL::SUB_LIST_2_ENDED == 0)
                {
                    LOCAL::TARGET_TICKS.PUSH_BACK(LOCAL::SUB_LIST_2);
                    LOCAL::SUB_LIST_2_CNT += 1;
                    LOCAL::SUB_LIST_2.NEXT();
                    continue;
                }
                /* we fell into none of the continues above, this means that both sub-lists ended */
                break;
            }
            /* for the next iteration, we start new sub-list 1 from where old sub-list 2 ended */
            LOCAL::SUB_LIST_2.COPY(LOCAL::SUB_LIST_1);
            if (LOCAL::SUB_LIST_1.IS_END() == 1)
            {
                break;
            }
        }
        /* copying newer tick list into the original one */
        /* implementation can be optimized to avoid this step but well */
        ${tick_list_var}.CLEAR();
        for (TICK_LIST_TICK LOCAL::T : LOCAL::TARGET_TICKS)
        {
            ${tick_list_var}.PUSH_BACK(LOCAL::T);
        }
        LOCAL::TARGET_TICKS.CLEAR();
    }
}
|
|
@@ -0,0 +1,140 @@
|
|
|
1
|
+
from collections import defaultdict
|
|
2
|
+
|
|
3
|
+
from onetick.py.core._internal._nodes_history import _NodesHistory
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class _ProxyNode:
    """
    This class wraps node in _Source with goal to track changes

    Every structural mutation is applied twice: once to the live ep object
    (``self._ep``) and once recorded into ``self._hist`` (a ``_NodesHistory``)
    so the whole graph can be replayed later via :meth:`copy_graph`.
    """

    def __init__(self, ep, key, _, out_pin, refresh_func=None):
        # ep: the wrapped event-processor node; key: its uuid in the history
        self._ep = ep
        self._key = key
        self._out_pin = out_pin
        self._name = ""
        self._refresh_func = refresh_func

        self._hist = _NodesHistory()
        self._hist.assign(ep, key)

    def sink(self, p_out_pin, ep, key, in_pin, _, move_node=True):
        """
        Connect self._ep[p_out_pin|self._out_pin] -> ep[in_pin]
        """
        self._refresh_func()

        if not p_out_pin:
            p_out_pin = self._out_pin

        # record the connection so it can be replayed by copy_graph()
        self._hist.sink(self._ep, self._key, p_out_pin, ep, key, in_pin)

        t_p_ep = self._ep
        if p_out_pin:
            t_p_ep = t_p_ep[p_out_pin]

        t_ep = ep
        if in_pin:
            # NOTE(review): this branch is a no-op (assigns the same value).
            # _Sink.build applies the pin as eps[key](in_pin) — presumably this
            # was meant to be ``t_ep = ep(in_pin)``; confirm before changing.
            t_ep = ep

        if move_node:
            t_p_ep.sink(t_ep)
            # the proxy now points at the newly attached node
            self._ep = ep
            self._key = key
            # it is not the first, then no need to store this pin
            self._out_pin = None
            self._name = ""

        return self._ep

    def source(self, p_in_pin, ep, key, _, out_pin):
        """
        Connect self._ep[p_in_pin] <- ep[out_pin]
        """
        self._refresh_func()

        self._hist.source(self._ep, self._key, p_in_pin, ep, key, out_pin)

        t_p_ep = self._ep
        if p_in_pin:
            t_p_ep = t_p_ep(p_in_pin)

        t_ep = ep
        if out_pin:
            t_ep = t_ep[out_pin]

        t_p_ep.source(t_ep)

        return self._ep

    def source_by_key(self, to_key, ep, key, _, out_pin):
        """
        Connect node with key=to_key <- ep[out_pin]

        Note: only recorded in the history — no immediate connection is made here.
        """
        self._refresh_func()

        self._hist.source_by_key(to_key, ep, key, out_pin)

        return self._ep

    def node_name(self, name=None, key=None):
        # getter/setter combo: with ``name`` it records the new node name
        # (optionally for another node via ``key``) and returns None;
        # without arguments it returns the cached name of this node
        if name is not None:
            if key:
                # set node_name by key
                self._hist.node_name(key, name)
            else:
                self._hist.node_name(self._key, name)
                self._name = name
            return None
        else:
            return self._name

    def tick_type(self, tt):
        # apply tick type both to the live node and to the recorded history
        self._refresh_func()
        self._hist.tick_type(self._ep, self._key, tt)
        self._ep.tick_type(tt)

    def symbol(self, symbol):
        # apply symbol both to the live node and to the recorded history
        self._refresh_func()
        self._hist.symbol(self._ep, self._key, symbol)
        self._ep.set_symbol(symbol)

    def get(self):
        # access to the wrapped ep object
        return self._ep

    def key(self, _key=None):
        # getter/setter for the node uuid
        if _key:
            self._key = _key

        return self._key

    def out_pin(self, _out_pin=None):
        # getter/setter for the stored output pin
        if _out_pin:
            self._out_pin = _out_pin

        return self._out_pin

    def copy_graph(self, eps=None, print_out=False):
        # replay the recorded history into ``eps`` and return the rebuilt
        # root node together with its key/pin info
        if eps is None:
            # NOTE(review): defaultdict() without a factory behaves exactly
            # like a plain dict — presumably ``{}`` was intended; confirm
            eps = defaultdict()

        return self._hist.build(eps, self._key, print_out=print_out), self._key, None, self._out_pin

    def rebuild_graph(self, keys: dict):
        """
        Changing all uuids in this node and it's history.
        Need it for making a deep copy of node.

        Parameters
        ----------
        keys: dict
            Mapping from old key to new key
        """
        self._key = keys[self._key]
        self._hist.rebuild(keys)

    def copy_rules(self, deep=False):
        # expose a (shallow or deep) copy of the recorded rules
        return self._hist.copy(deep=deep)

    def add_rules(self, rules):
        # merge rules from another node's history (duplicates are skipped there)
        self._hist.add(rules)
|