onetick-py 1.172.0__py3-none-any.whl → 1.174.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- onetick/py/_version.py +1 -1
- onetick/py/aggregations/_base.py +1 -1
- onetick/py/aggregations/order_book.py +13 -7
- onetick/py/aggregations/other.py +2 -2
- onetick/py/compatibility.py +15 -10
- onetick/py/configuration.py +3 -2
- onetick/py/core/_source/source_methods/misc.py +59 -13
- onetick/py/core/_source/symbol.py +5 -2
- onetick/py/core/column_operations/_methods/conversions.py +3 -2
- onetick/py/core/column_operations/_methods/op_types.py +12 -3
- onetick/py/core/column_operations/base.py +2 -2
- onetick/py/core/source.py +9 -1
- onetick/py/functions.py +3 -2
- onetick/py/run.py +3 -1
- onetick/py/types.py +127 -59
- onetick/py/utils/render.py +271 -58
- onetick/py/utils/types.py +2 -0
- onetick/py/utils/tz.py +8 -6
- {onetick_py-1.172.0.dist-info → onetick_py-1.174.0.dist-info}/METADATA +12 -26
- {onetick_py-1.172.0.dist-info → onetick_py-1.174.0.dist-info}/RECORD +24 -24
- {onetick_py-1.172.0.dist-info → onetick_py-1.174.0.dist-info}/WHEEL +0 -0
- {onetick_py-1.172.0.dist-info → onetick_py-1.174.0.dist-info}/entry_points.txt +0 -0
- {onetick_py-1.172.0.dist-info → onetick_py-1.174.0.dist-info}/licenses/LICENSE +0 -0
- {onetick_py-1.172.0.dist-info → onetick_py-1.174.0.dist-info}/top_level.txt +0 -0
onetick/py/utils/render.py
CHANGED
@@ -1,6 +1,7 @@
 import os
 import re
 import html
+import textwrap
 import graphviz as gv
 from collections import defaultdict, deque
 from datetime import datetime
@@ -29,6 +30,82 @@ IF_ELSE_EPS = {
 }


+def _parse_table_fields(line: str) -> list:
+    result = line.strip().split(',')
+    for idx in range(0, len(result) - 1):
+        result[idx] = result[idx] + ','
+
+    return result
+
+
+def _light_function_splitter(line: str, sep=',') -> list:
+    lines = []
+    current_line: list = []
+    parentheses_stack = 0
+    quotes_stack = 0
+    lead_quote_type = None
+
+    for ch in line:
+        if ch == sep and not parentheses_stack and not quotes_stack:
+            lines.append(''.join(current_line) + sep)
+            current_line = []
+            continue
+
+        current_line.append(ch)
+
+        if ch == '(' and not quotes_stack:
+            parentheses_stack += 1
+            continue
+
+        if ch == ')' and not quotes_stack:
+            parentheses_stack -= 1
+            if parentheses_stack < 0:
+                break
+
+        if ch in ["\"", "'"]:
+            if lead_quote_type is None:
+                lead_quote_type = ch
+                quotes_stack = 1
+            elif ch == lead_quote_type:
+                lead_quote_type = None
+                quotes_stack = 0
+
+    if parentheses_stack != 0:
+        raise ValueError(f'Incorrect parentheses count in function: `{line}`')
+
+    if quotes_stack != 0:
+        raise ValueError(f'Incorrect quotes count in function: `{line}`')
+
+    lines.append(''.join(current_line))
+
+    return lines
+
+
+EP_TO_MULTILINE_ATTRS: dict = {
+    "ADD_FIELDS": {
+        "set": _light_function_splitter,
+    },
+    "UPDATE_FIELDS": {
+        "set": _light_function_splitter,
+    },
+    "TABLE": {
+        "fields": _parse_table_fields,
+    },
+    "PASSTHROUGH": {
+        "fields": _parse_table_fields,
+    },
+    "COMPUTE": {
+        "compute": _light_function_splitter,
+    },
+    "DECLARE_STATE_VARIABLES": {
+        "variables": _light_function_splitter,
+    },
+    "RENAME_FIELDS": {
+        "rename_fields": _parse_table_fields,
+    }
+}
+
+
 @dataclass
 class NestedQuery:
     name: str
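
The two helpers added above drive the new per-parameter formatting: `_light_function_splitter` splits an EP parameter string only on top-level separators, while `_parse_table_fields` splits a field list on every comma. A minimal sanity check, assuming the private helpers are imported directly from `onetick.py.utils.render` (internal API, may change without notice; the sample strings are illustrative):

    from onetick.py.utils.render import _light_function_splitter, _parse_table_fields

    # a comma inside quotes or parentheses does not start a new chunk
    print(_light_function_splitter("SYM='A,B',VAL=FUNC(X,Y)"))
    # ["SYM='A,B',", 'VAL=FUNC(X,Y)']

    # TABLE/PASSTHROUGH field lists are split on every comma, keeping the separator
    print(_parse_table_fields("X long, Y double"))
    # ['X long,', ' Y double']

The splitters are looked up by EP name in EP_TO_MULTILINE_ATTRS, so only the event processors listed there get the multiline treatment.
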
@@ -53,6 +130,14 @@ class NestedQuery:
         return "::".join(i for i in [self.file_path, self.query] if i)


+@dataclass
+class Config:
+    height: int = field(default=0)
+    width: int = field(default=0)
+    render_debug_info: bool = field(default=False)
+    constraint_edges: str = field(default="true")
+
+
 @dataclass
 class EP:
     name: str
@@ -341,11 +426,14 @@ def _parse_function_params(func_params: str) -> Tuple[list, dict]:
     return args, kwargs


-def _parse_function(expression: str) -> Tuple[Optional[str], list, dict]:
+def _parse_function(expression: str, pattern: Optional[str] = None) -> Tuple[Optional[str], list, dict]:
     # EP_NAME(PARAM_NAME=PARAM_VALUE,...)
     # [a-zA-Z_:] is EP_NAME, can contain letters, underscore and colon
     # [\s\S] is any symbol including newline (because . doesn't include newline by default)
-
+    if not pattern:
+        pattern = r"^([a-zA-Z_:]*)\s*\(([\s\S]*)\)\s*$"
+
+    m = re.search(pattern, expression)

     if not m:
         return None, [], {}
@@ -412,9 +500,9 @@ def _parse_ep(ep_string: str, parse_eval_from_params: bool = False) -> Union[EP,
         is_query_found = True

         if kwargs_key in kwargs:
-            query_path = kwargs
+            query_path = kwargs[kwargs_key][1]
         elif 0 <= args_idx < len(args):
-            query_path = args
+            query_path = args[args_idx]
         else:
             # don't do anything, just process as EP
             is_query_found = False
@@ -697,32 +785,59 @@ def read_otq(path: str, parse_eval_from_params: bool = False) -> Optional[Graph]
     return graph


-def
-
-
+def _truncate_param_value(value, height, width):
+    lines = [
+        line if len(line) <= width or not width else line[:width] + "..."
+        for line in value.splitlines()
+    ]

-height
-
-
+    if height and len(lines) > height:
+        lines = lines[:height] + ["..."]
+
+    return "\n".join(lines)
+
+
+def _split_long_value_to_lines(value, height, width, indent=0, escape=False) -> list:
+    if len(value) <= width:
+        return [value]
+
+    result = []
+    lines = value.splitlines()
+
+    # textwrap.wrap replaces newline character to whitespace and brakes multiline strings
+    # If replace_whitespace=False, it preserves newline, but not use it for result array line splitting
+    for line in lines:
+        result.extend(textwrap.wrap(line, width=width, replace_whitespace=False))
+
+    if escape:
+        result = [html.escape(s) for s in result]
+
+    if indent:
+        indent_str = " " * indent
+        for i in range(1, len(result)):
+            result[i] = indent_str + result[i]

+    if height and len(result) > height:
+        result = result[:height] + ['...']
+    return result
+
+
+def transform_param_value(ep: Any, param, value, height, width):
     if isinstance(ep, EP) and (
         ep.name == "PER_TICK_SCRIPT" and param.lower() == "script" or
         ep.name == "CSV_FILE_LISTING" and param.lower() == "file_contents"
     ):
-
-            line if len(line) <= width or not width else line[:width] + "..."
-            for line in value.split("\n")
-        ]
-
-        if height and len(lines) > height:
-            lines = lines[:height] + ["..."]
+        return _truncate_param_value(value, height, width)

-
+    if not (isinstance(ep, EP) and EP_TO_MULTILINE_ATTRS.get(ep.name, {}).get(param.lower())):
+        return "\n".join(_split_long_value_to_lines(value, height, width))

     return value


-def build_symbols(
+def build_symbols(
+    symbols, gr_nested, gr_static, graphs: GraphStorage, graph_node, config: Config, reverse=False, graph_file=None,
+):
     table = GVTable()

     for symbol_data in symbols:
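
The inline truncation that used to live in transform_param_value is now split into two reusable helpers: _truncate_param_value cuts each line at `width` characters and the whole block at `height` lines, while _split_long_value_to_lines wraps a long value with textwrap instead of cutting it. A rough illustration, again importing the private helpers directly (internal API; the sample values are arbitrary):

    from onetick.py.utils.render import _truncate_param_value, _split_long_value_to_lines

    script = "\n".join(f"line {i}: " + "x" * 100 for i in range(5))

    # script-like parameters (PER_TICK_SCRIPT's script, CSV_FILE_LISTING's file_contents) are truncated
    print(_truncate_param_value(script, 2, 40))

    # other long parameter values are wrapped to the configured width instead
    print(_split_long_value_to_lines("A" * 90 + " " + "B" * 90, 10, 30))
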
@@ -732,11 +847,17 @@ def build_symbols(symbols, gr_nested, gr_static, graphs: GraphStorage, graph_nod
         if symbol.query:
             if symbol.is_local:
                 # reversed directions here brakes everything
+
+                if graph_file is None:
+                    raise ValueError('`graph_file` parameter required for this case')
+
+                nested_cluster_id = graphs.get_query_unique_id(symbol.query, graph_file)
+
                 gr_nested.edge(
-                    f"
+                    f"{nested_cluster_id}__footer",
                     f"{graph_node}:symbols",
-                    ltail=f"
-                    style="dashed",
+                    ltail=f"{nested_cluster_id}",
+                    style="dashed", dir="both", constraint=config.constraint_edges,
                 )
                 continue

@@ -747,7 +868,7 @@ def build_symbols(symbols, gr_nested, gr_static, graphs: GraphStorage, graph_nod
                 f"{nested_cluster_id}__footer",
                 f"{graph_node}:symbols",
                 ltail=nested_cluster_id,
-                style="dashed",
+                style="dashed", dir="both", constraint=config.constraint_edges,
             )
             continue

@@ -764,11 +885,48 @@ def build_symbols(symbols, gr_nested, gr_static, graphs: GraphStorage, graph_nod
     gr_static.edge(
         f"{graph_node}__symbols" if not reverse else f"{graph_node}:symbols",
         f"{graph_node}:symbols" if not reverse else f"{graph_node}__symbols",
-        style="dashed", constraint=
+        style="dashed", constraint=config.constraint_edges,
     )


-def
+def _parse_special_attribute(param_name, param_lines, parser, height, width, cols=4):
+    """
+    Builds better param representation for selected parameters and EPs
+    """
+    def generate_row_string(_line: list) -> list:
+        sep = " "
+
+        # only in this case line could be longer than width
+        if len(_line) == 1 and len(_line[0]) > width:
+            _lines = _split_long_value_to_lines(_line[0], height, width, indent=4, escape=True)
+        else:
+            _lines = [sep.join(html.escape(s) for s in _line)]
+
+        return [" " * 2 + s for s in _lines]
+
+    param_value = ' '.join(param_lines)
+    params = parser(param_value)
+
+    params_table = [f"{param_name}:"]
+    current_line = []
+    current_width = 0
+
+    for param in params:
+        if width and current_line and current_width + len(param) >= width or len(current_line) == cols:
+            params_table.extend(generate_row_string(current_line))
+            current_line = []
+            current_width = 0
+
+        current_line.append(param)
+        current_width += len(param)
+
+    if current_line:
+        params_table.extend(generate_row_string(current_line))
+
+    return [(params_table, {"ALIGN": "LEFT", "BALIGN": "LEFT"})]
+
+
+def build_node(graphs: GraphStorage, node: Node, config: Config):
     if node.ep is None:
         raise ValueError(f"EP of node {node.id} could not be None")

@@ -786,6 +944,9 @@ def build_node(graphs: GraphStorage, node: Node, line_limit: Optional[Tuple[int,
     if node.tick_type:
         table.cell([node.tick_type])

+    if config.render_debug_info:
+        table.cell([node.id])
+
     if node.symbols:
         table.cell([("[■]", {"port": "symbols"})])

@@ -793,6 +954,10 @@ def build_node(graphs: GraphStorage, node: Node, line_limit: Optional[Tuple[int,
     params: List[Tuple[Optional[str], Union[str, NestedQuery]]] = \
         [(None, v) for v in node.ep.args] + list(node.ep.kwargs.values())

+    param_args_lines = []
+    param_kwargs_lines = []
+    special_params = []
+
     for idx, data in enumerate(params):
         k, v = data
         attrs = {"port": k}
@@ -806,34 +971,53 @@ def build_node(graphs: GraphStorage, node: Node, line_limit: Optional[Tuple[int,
         else:
             param_value = v

-
-        param_value = html.escape(param_value).replace("\t", " " * 4)
-        param_lines = param_value.split("\n")
+        is_special_attribute = k and EP_TO_MULTILINE_ATTRS.get(node.ep.name, {}).get(k.lower())

-
-            if len(param_lines) == 1:
-                param_lines[0] = f"{html.escape(k)}={param_lines[0]}"
-            else:
-                param_lines = [f"{html.escape(k)}="] + param_lines
+        param_value = transform_param_value(node.ep, k, param_value, config.height, config.width)

-        if
-
-            # if there are multiline parameter for EP.
-            # Align change affects all parameters for EP.
-            for i in range(len(param_lines)):
-                if i > 0:
-                    param_lines[i] = " " * 2 + param_lines[i]
+        if not is_special_attribute:
+            param_value = html.escape(param_value)

-
+        param_value = param_value.replace("\t", " " * 4)
+        param_lines = param_value.splitlines()

+        # additional k check required by mypy
+        if is_special_attribute and k:
+            special_params.extend(
+                _parse_special_attribute(
+                    k, param_lines, EP_TO_MULTILINE_ATTRS[node.ep.name][k.lower()], config.height, config.width,
+                )
+            )
+        else:
+            if k:
+                if len(param_lines) == 1:
+                    param_lines[0] = f"{html.escape(k)}={param_lines[0]}"
+                else:
+                    param_lines = [f"{html.escape(k)}:"] + param_lines
+
+            if len(param_lines) > 1:
+                # Add idents disable default horizontal central align
+                # if there are multiline parameter for EP.
+                # Align change affects all parameters for EP.
+                for i in range(len(param_lines)):
+                    if i > 0:
+                        param_lines[i] = " " * 2 + param_lines[i]
+
+                attrs.update({"ALIGN": "LEFT", "BALIGN": "LEFT"})
+
+        if k:
+            param_kwargs_lines.append((param_lines, attrs))
+        else:
+            param_args_lines.append((param_lines, attrs))
+
+    for param_lines, attrs in param_args_lines + special_params + param_kwargs_lines:
         table.row([param_lines], attrs=attrs)

     if node.params:
-        table.row([[
-
-
-
-        table.row([node.ep.to_string()])
+        table.row([[
+            f"{html.escape(k)}={html.escape(_truncate_param_value(v, config.height, config.width))}"
+            for k, v in node.params.items()
+        ]])

     if isinstance(node.ep, IfElseEP):
         table.row([
@@ -886,7 +1070,7 @@ def _get_nested_query(nested_query: NestedQuery, local_graph: Graph, graphs: Gra


 def _render_graph(
-    gr_root, gr, graphs: GraphStorage, graph_name: str, queries: set,
+    gr_root, gr, graphs: GraphStorage, graph_name: str, queries: set, config: Config,
 ):
     graph = graphs[graph_name]

@@ -947,15 +1131,18 @@ def _render_graph(

         gr_sub.edge(
             f"{footer_id}:params", f"{query_id}__params",
-            style="dashed", constraint=
+            style="dashed", constraint=config.constraint_edges,
         )

         if query.symbols:
-            build_symbols(
+            build_symbols(
+                query.symbols, gr, gr_sub, graphs, f"{query_id}__footer", config,
+                reverse=True, graph_file=graph.file_path,
+            )

         for node_id, node in query.nodes.items():
             node_unique_id = _get_node_unique_id(node, query)
-            gr_sub.node(node_unique_id, build_node(graphs, node,
+            gr_sub.node(node_unique_id, build_node(graphs, node, config), group=query_name)

            for sink in node.sinks:
                if "OUT" in node.labels:
@@ -989,11 +1176,11 @@ def _render_graph(
                 f"{node_unique_id}:{param_name}",
                 _get_node_unique_id(nested_cluster.roots[0], nested_cluster),
                 lhead=nested_cluster.get_id(),
-                style="dashed", dir="both",
+                style="dashed", dir="both", constraint=config.constraint_edges,
             )

             if node.symbols:
-                build_symbols(node.symbols, gr, gr_sub, graphs, node_unique_id)
+                build_symbols(node.symbols, gr, gr_sub, graphs, node_unique_id, config, graph_file=graph.file_path)

             if isinstance(node.ep, NestedQuery):
                 nested_cluster = _get_nested_query(node.ep, graph, graphs)
@@ -1004,7 +1191,7 @@ def _render_graph(
                 node_unique_id,
                 _get_node_unique_id(nested_cluster.roots[0], nested_cluster),
                 lhead=nested_cluster.get_id(),
-                style="dashed", dir="both",
+                style="dashed", dir="both", constraint=config.constraint_edges,
             )

@@ -1014,9 +1201,11 @@ def render_otq(
     output_format: Optional[str] = None,
     load_external_otqs: bool = True,
     view: bool = False,
-    line_limit: Optional[Tuple[int, int]] = (10,
+    line_limit: Optional[Tuple[int, int]] = (10, 60),
     parse_eval_from_params: bool = False,
+    render_debug_info: bool = False,
     debug: bool = False,
+    graphviz_compat_mode: bool = False,
 ) -> str:
     """
     Render queries from .otq files.
@@ -1029,7 +1218,7 @@ def render_otq(
     image_path: str, None
         Path for generated image. If omitted, image will be saved in a temp dir
     output_format: str, None
-        `Graphviz` rendering format. Default: `
+        `Graphviz` rendering format. Default: `svg`.
         If `image_path` contains one of next extensions, `output_format` will be set automatically: `png`, `svg`, `dot`.
     load_external_otqs: bool
         If set to `True` (default) dependencies from external .otq files (not listed in ``path`` param)
@@ -1043,8 +1232,13 @@ def render_otq(
         If one of tuple values set to zero the corresponding limit disabled.
     parse_eval_from_params: bool
         Enable parsing and printing `eval` sub-queries from EP parameters.
+    render_debug_info: bool
+        Render additional debug information.
     debug: bool
         Allow to print stdout or stderr from `Graphviz` render.
+    graphviz_compat_mode: bool
+        Change internal parameters of result graph for better compatibility with old `Graphviz` versions.
+        Could produce larger and less readable graphs.

     Returns
     -------
@@ -1069,6 +1263,19 @@ def render_otq(

     >>> otp.utils.render_otq(["./first.otq", "./second.otq::some_query"]) # doctest: +SKIP
     """
+    if line_limit is None:
+        line_limit = (0, 0)
+
+    height, width = line_limit
+    if height < 0 or width < 0:
+        raise ValueError("line_limit values should not be negative")
+
+    config_kwargs = {}
+    if graphviz_compat_mode:
+        config_kwargs["constraint_edges"] = "false"
+
+    config = Config(height=height, width=width, render_debug_info=render_debug_info, **config_kwargs)
+
     if not isinstance(path, list):
         path = [path]

@@ -1137,7 +1344,7 @@ def render_otq(
         output_format = extension

     if not output_format:
-        output_format = "
+        output_format = "svg"

     if not image_path:
         image_path = TmpFile().path
@@ -1154,8 +1361,14 @@ def render_otq(
     with gr.subgraph(name=f"cluster__graph__{idx}", node_attr={"shape": "plaintext"}) as gr_otq:
         gr_otq.attr(label=otq_path)
         gr_otq.attr(margin="16")
-        _render_graph(gr, gr_otq, graphs, otq_path, queries_to_render[otq_path],
+        _render_graph(gr, gr_otq, graphs, otq_path, queries_to_render[otq_path], config)

     idx += 1

-
+    try:
+        return gr.render(view=view, quiet=not debug)
+    except Exception as exc:
+        raise RuntimeError(
+            "Graphviz render failed. Try to set parameter `graphviz_compat_mode=True` "
+            "for better compatibility if you use old Graphviz version"
+        ) from exc
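
Taken together, the render.py changes thread a single Config object through graph building and expose two new switches on render_otq. A minimal sketch of the updated call, based on the new signature and docstring above (the .otq path is the placeholder from the docstring example, using its `otp` import alias):

    import onetick.py as otp

    image = otp.utils.render_otq(
        "./first.otq",
        line_limit=(10, 60),         # new default: 10 lines / 60 characters per parameter value
        render_debug_info=True,      # adds a cell with the node id to every rendered node
        graphviz_compat_mode=True,   # sets constraint="false" on dashed edges for old Graphviz builds
    )
    print(image)  # path to the rendered image; "svg" is now the default output format
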
onetick/py/utils/types.py
CHANGED
@@ -8,6 +8,8 @@ def get_type_that_includes(types):
     if b_type1 != b_type2:
         if {b_type1, b_type2} == {int, float}:
             dtype = float
+        elif {b_type1, b_type2} == {ott.decimal, float} or {b_type1, b_type2} == {ott.decimal, int}:
+            dtype = ott.decimal
         elif {b_type1, b_type2} == {ott.nsectime, ott.msectime}:
             dtype = ott.nsectime
         else:
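
The new branch widens mixed numeric types toward decimal instead of falling into the else case. A hedged sketch of the effect, calling the internal helper directly and assuming it accepts a plain pair of types (the `ott` alias mirrors the one used in the diff):

    import onetick.py.types as ott
    from onetick.py.utils.types import get_type_that_includes

    # decimal now absorbs int and float when column types are merged
    print(get_type_that_includes([ott.decimal, float]))
    print(get_type_that_includes([ott.decimal, int]))
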
onetick/py/utils/tz.py
CHANGED
@@ -3,8 +3,8 @@ import sys
 import warnings

 from typing import Optional
+from contextlib import suppress

-import pytz
 import dateutil.tz
 import tzlocal

@@ -15,8 +15,8 @@ from onetick.py.backports import zoneinfo
 def get_tzfile_by_name(timezone):
     if isinstance(timezone, str):
         try:
-            timezone =
-        except
+            timezone = zoneinfo.ZoneInfo(timezone)
+        except zoneinfo.ZoneInfoNotFoundError:
             timezone = dateutil.tz.gettz(timezone)
     return timezone

@@ -35,8 +35,8 @@ def get_timezone_from_datetime(dt) -> Optional[str]:
         return None
     if tzinfo is datetime.timezone.utc:
         return 'UTC'
-
-
+    with suppress(ModuleNotFoundError):
+        import pytz
+        if isinstance(tzinfo, pytz.BaseTzInfo):
+            return tzinfo.zone
     if isinstance(tzinfo, zoneinfo.ZoneInfo):
         return tzinfo.key
     if isinstance(tzinfo, dateutil.tz.tzlocal):
@@ -48,7 +50,7 @@ def get_timezone_from_datetime(dt) -> Optional[str]:
         warnings.warn(
             "It's not recommended to use dateutil.tz timezones on Windows platform. "
             "Function 'get_timezone_from_datetime' can't guarantee correct results in this case. "
-            "Please, use
+            "Please, use zoneinfo timezones instead."
         )
     if hasattr(tzinfo, '_filename'):
         if tzinfo._filename == '/etc/localtime':
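
With pytz demoted to an optional import, zoneinfo becomes the primary path both for resolving timezone names and for reading a timezone back from a datetime. A small behaviour sketch (module path taken from this diff; assumes Python 3.9+, where the backported zoneinfo resolves to the standard library module):

    import datetime
    from zoneinfo import ZoneInfo
    from onetick.py.utils.tz import get_timezone_from_datetime

    dt = datetime.datetime(2024, 1, 1, tzinfo=ZoneInfo("America/New_York"))
    print(get_timezone_from_datetime(dt))      # 'America/New_York', taken from ZoneInfo.key

    utc_dt = datetime.datetime(2024, 1, 1, tzinfo=datetime.timezone.utc)
    print(get_timezone_from_datetime(utc_dt))  # 'UTC'
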
{onetick_py-1.172.0.dist-info → onetick_py-1.174.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: onetick-py
-Version: 1.172.0
+Version: 1.174.0
 Summary: Python package that allows you to work with OneTick
 Author-email: solutions <solutions@onetick.com>
 License-Expression: MIT
@@ -19,40 +19,26 @@ Classifier: Environment :: Console
 Requires-Python: >=3.9
 Description-Content-Type: text/markdown
 License-File: LICENSE
-Requires-Dist: pandas
-Requires-Dist:
-Requires-Dist: pandas<2.1.0,>=1.3.4; python_version == "3.8"
-Requires-Dist: pandas<2.3.0,>=1.3.4; python_version == "3.9"
-Requires-Dist: pandas<2.3.0,>=1.5.1; python_version == "3.10"
-Requires-Dist: pandas<2.3.0,>=1.5.3; python_version == "3.11"
-Requires-Dist: pandas<2.3.0,>=2.2.0; python_version >= "3.12"
-Requires-Dist: pyarrow; python_version >= "3.12"
-Requires-Dist: numpy==1.19.5; python_version <= "3.7"
-Requires-Dist: numpy==1.21.6; python_version == "3.8"
-Requires-Dist: numpy<2.3.0,>=1.20.3; python_version >= "3.9"
+Requires-Dist: pandas>=1.5.2
+Requires-Dist: numpy
 Requires-Dist: coolname
 Requires-Dist: python-dateutil
 Requires-Dist: python-dotenv
 Requires-Dist: tzlocal
 Requires-Dist: tzdata
-Requires-Dist: backports.zoneinfo==0.2.1; python_version < "3.9"
-Requires-Dist: typing_extensions==4.7.1; python_version < "3.8"
-Requires-Dist: singledispatchmethod==1.0; python_version < "3.8"
-Requires-Dist: backports.cached-property==1.0.2; python_version <= "3.8"
-Requires-Dist: backports.functools-lru-cache==1.6.6; python_version <= "3.8"
-Requires-Dist: astunparse==1.6.3; python_version <= "3.8"
 Requires-Dist: graphviz==0.20.1; python_version > "3.6"
 Requires-Dist: packaging>=21.0
 Provides-Extra: strict
-Requires-Dist: numpy==1.
-Requires-Dist: numpy==1.
-Requires-Dist: numpy==1.
-Requires-Dist: numpy==1.26.4;
-Requires-Dist: pandas==1.
-Requires-Dist: pandas==1.3
-Requires-Dist: pandas==1.5.1; python_version == "3.10" and extra == "strict"
+Requires-Dist: numpy==1.23.0; python_version == "3.9" and extra == "strict"
+Requires-Dist: numpy==1.23.0; python_version == "3.10" and extra == "strict"
+Requires-Dist: numpy==1.26.4; python_version == "3.11" and extra == "strict"
+Requires-Dist: numpy==1.26.4; python_version == "3.12" and extra == "strict"
+Requires-Dist: pandas==1.5.2; python_version == "3.9" and extra == "strict"
+Requires-Dist: pandas==1.5.3; python_version == "3.10" and extra == "strict"
 Requires-Dist: pandas==1.5.3; python_version == "3.11" and extra == "strict"
-Requires-Dist: pandas==2.2.
+Requires-Dist: pandas==2.2.1; python_version == "3.12" and extra == "strict"
+Requires-Dist: pandas==2.3.0; python_version == "3.13" and extra == "strict"
+Requires-Dist: pandas==2.3.3; python_version == "3.14" and extra == "strict"
 Provides-Extra: webapi
 Requires-Dist: onetick.query_webapi; extra == "webapi"
 Provides-Extra: polars