onetick-py 1.162.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- locator_parser/__init__.py +0 -0
- locator_parser/acl.py +73 -0
- locator_parser/actions.py +266 -0
- locator_parser/common.py +365 -0
- locator_parser/io.py +41 -0
- locator_parser/locator.py +150 -0
- onetick/__init__.py +101 -0
- onetick/doc_utilities/__init__.py +3 -0
- onetick/doc_utilities/napoleon.py +40 -0
- onetick/doc_utilities/ot_doctest.py +140 -0
- onetick/doc_utilities/snippets.py +280 -0
- onetick/lib/__init__.py +4 -0
- onetick/lib/instance.py +138 -0
- onetick/py/__init__.py +290 -0
- onetick/py/_stack_info.py +89 -0
- onetick/py/_version.py +2 -0
- onetick/py/aggregations/__init__.py +11 -0
- onetick/py/aggregations/_base.py +645 -0
- onetick/py/aggregations/_docs.py +912 -0
- onetick/py/aggregations/compute.py +286 -0
- onetick/py/aggregations/functions.py +2216 -0
- onetick/py/aggregations/generic.py +104 -0
- onetick/py/aggregations/high_low.py +80 -0
- onetick/py/aggregations/num_distinct.py +83 -0
- onetick/py/aggregations/order_book.py +427 -0
- onetick/py/aggregations/other.py +1014 -0
- onetick/py/backports.py +26 -0
- onetick/py/cache.py +373 -0
- onetick/py/callback/__init__.py +5 -0
- onetick/py/callback/callback.py +275 -0
- onetick/py/callback/callbacks.py +131 -0
- onetick/py/compatibility.py +752 -0
- onetick/py/configuration.py +736 -0
- onetick/py/core/__init__.py +0 -0
- onetick/py/core/_csv_inspector.py +93 -0
- onetick/py/core/_internal/__init__.py +0 -0
- onetick/py/core/_internal/_manually_bound_value.py +6 -0
- onetick/py/core/_internal/_nodes_history.py +250 -0
- onetick/py/core/_internal/_op_utils/__init__.py +0 -0
- onetick/py/core/_internal/_op_utils/every_operand.py +9 -0
- onetick/py/core/_internal/_op_utils/is_const.py +10 -0
- onetick/py/core/_internal/_per_tick_scripts/tick_list_sort_template.script +121 -0
- onetick/py/core/_internal/_proxy_node.py +140 -0
- onetick/py/core/_internal/_state_objects.py +2307 -0
- onetick/py/core/_internal/_state_vars.py +87 -0
- onetick/py/core/_source/__init__.py +0 -0
- onetick/py/core/_source/_symbol_param.py +95 -0
- onetick/py/core/_source/schema.py +97 -0
- onetick/py/core/_source/source_methods/__init__.py +0 -0
- onetick/py/core/_source/source_methods/aggregations.py +810 -0
- onetick/py/core/_source/source_methods/applyers.py +296 -0
- onetick/py/core/_source/source_methods/columns.py +141 -0
- onetick/py/core/_source/source_methods/data_quality.py +301 -0
- onetick/py/core/_source/source_methods/debugs.py +270 -0
- onetick/py/core/_source/source_methods/drops.py +120 -0
- onetick/py/core/_source/source_methods/fields.py +619 -0
- onetick/py/core/_source/source_methods/filters.py +1001 -0
- onetick/py/core/_source/source_methods/joins.py +1393 -0
- onetick/py/core/_source/source_methods/merges.py +566 -0
- onetick/py/core/_source/source_methods/misc.py +1325 -0
- onetick/py/core/_source/source_methods/pandases.py +155 -0
- onetick/py/core/_source/source_methods/renames.py +356 -0
- onetick/py/core/_source/source_methods/sorts.py +183 -0
- onetick/py/core/_source/source_methods/switches.py +142 -0
- onetick/py/core/_source/source_methods/symbols.py +117 -0
- onetick/py/core/_source/source_methods/times.py +627 -0
- onetick/py/core/_source/source_methods/writes.py +702 -0
- onetick/py/core/_source/symbol.py +202 -0
- onetick/py/core/_source/tmp_otq.py +222 -0
- onetick/py/core/column.py +209 -0
- onetick/py/core/column_operations/__init__.py +0 -0
- onetick/py/core/column_operations/_methods/__init__.py +4 -0
- onetick/py/core/column_operations/_methods/_internal.py +28 -0
- onetick/py/core/column_operations/_methods/conversions.py +215 -0
- onetick/py/core/column_operations/_methods/methods.py +294 -0
- onetick/py/core/column_operations/_methods/op_types.py +150 -0
- onetick/py/core/column_operations/accessors/__init__.py +0 -0
- onetick/py/core/column_operations/accessors/_accessor.py +30 -0
- onetick/py/core/column_operations/accessors/decimal_accessor.py +92 -0
- onetick/py/core/column_operations/accessors/dt_accessor.py +464 -0
- onetick/py/core/column_operations/accessors/float_accessor.py +160 -0
- onetick/py/core/column_operations/accessors/str_accessor.py +1374 -0
- onetick/py/core/column_operations/base.py +1061 -0
- onetick/py/core/cut_builder.py +149 -0
- onetick/py/core/db_constants.py +20 -0
- onetick/py/core/eval_query.py +244 -0
- onetick/py/core/lambda_object.py +442 -0
- onetick/py/core/multi_output_source.py +193 -0
- onetick/py/core/per_tick_script.py +2253 -0
- onetick/py/core/query_inspector.py +465 -0
- onetick/py/core/source.py +1663 -0
- onetick/py/db/__init__.py +2 -0
- onetick/py/db/_inspection.py +1042 -0
- onetick/py/db/db.py +1423 -0
- onetick/py/db/utils.py +64 -0
- onetick/py/docs/__init__.py +0 -0
- onetick/py/docs/docstring_parser.py +112 -0
- onetick/py/docs/utils.py +81 -0
- onetick/py/functions.py +2354 -0
- onetick/py/license.py +188 -0
- onetick/py/log.py +88 -0
- onetick/py/math.py +947 -0
- onetick/py/misc.py +437 -0
- onetick/py/oqd/__init__.py +22 -0
- onetick/py/oqd/eps.py +1195 -0
- onetick/py/oqd/sources.py +325 -0
- onetick/py/otq.py +211 -0
- onetick/py/pyomd_mock.py +47 -0
- onetick/py/run.py +841 -0
- onetick/py/servers.py +173 -0
- onetick/py/session.py +1342 -0
- onetick/py/sources/__init__.py +19 -0
- onetick/py/sources/cache.py +167 -0
- onetick/py/sources/common.py +126 -0
- onetick/py/sources/csv.py +642 -0
- onetick/py/sources/custom.py +85 -0
- onetick/py/sources/data_file.py +305 -0
- onetick/py/sources/data_source.py +1049 -0
- onetick/py/sources/empty.py +94 -0
- onetick/py/sources/odbc.py +337 -0
- onetick/py/sources/order_book.py +238 -0
- onetick/py/sources/parquet.py +168 -0
- onetick/py/sources/pit.py +191 -0
- onetick/py/sources/query.py +495 -0
- onetick/py/sources/snapshots.py +419 -0
- onetick/py/sources/split_query_output_by_symbol.py +198 -0
- onetick/py/sources/symbology_mapping.py +123 -0
- onetick/py/sources/symbols.py +357 -0
- onetick/py/sources/ticks.py +825 -0
- onetick/py/sql.py +70 -0
- onetick/py/state.py +256 -0
- onetick/py/types.py +2056 -0
- onetick/py/utils/__init__.py +70 -0
- onetick/py/utils/acl.py +93 -0
- onetick/py/utils/config.py +186 -0
- onetick/py/utils/default.py +49 -0
- onetick/py/utils/file.py +38 -0
- onetick/py/utils/helpers.py +76 -0
- onetick/py/utils/locator.py +94 -0
- onetick/py/utils/perf.py +499 -0
- onetick/py/utils/query.py +49 -0
- onetick/py/utils/render.py +1139 -0
- onetick/py/utils/script.py +244 -0
- onetick/py/utils/temp.py +471 -0
- onetick/py/utils/types.py +118 -0
- onetick/py/utils/tz.py +82 -0
- onetick_py-1.162.2.dist-info/METADATA +148 -0
- onetick_py-1.162.2.dist-info/RECORD +152 -0
- onetick_py-1.162.2.dist-info/WHEEL +5 -0
- onetick_py-1.162.2.dist-info/entry_points.txt +2 -0
- onetick_py-1.162.2.dist-info/licenses/LICENSE +21 -0
- onetick_py-1.162.2.dist-info/top_level.txt +2 -0
onetick/py/utils/render.py
@@ -0,0 +1,1139 @@
import os
import re
import html
import graphviz as gv
from collections import defaultdict, deque
from datetime import datetime
from dataclasses import dataclass, field
from pathlib import Path
from typing import Any, Deque, Dict, List, Optional, Set, Tuple, Union

from onetick.py.utils import TmpFile


EPS_WITH_QUERIES = {
    "eval": (0, "expression"),
    "eval_expression": (0, "expression"),
    "join_with_query": (0, "otq_query"),
    "nested_otq": (0, "otq_name"),
    "join_with_collection_summary": (1, "otq_query"),
    "modify_state_var_from_query": (1, "otq_query"),
    "create_cache": (1, "otq_file_path"),
    "read_cache": (4, "create_cache_query"),
}

IF_ELSE_EPS = {
    "CHARACTER_PRESENT", "CORRECT_TICK_FILTER", "PRIMARY_EXCH", "REGEX_MATCHES", "SKIP_BAD_TICK", "TIME_FILTER",
    "TRD_VS_MID", "TRD_VS_QUOTE", "UPTICK", "VALUE_COMPARE", "VALUE_PRESENT", "VOLUME_LIMIT", "WHERE_CLAUSE",
}


@dataclass
class NestedQuery:
    name: str
    raw_string: str
    query: Optional[str] = field(default=None)
    expression: Optional[str] = field(default=None)
    file_path: Optional[str] = field(default=None)
    args: list = field(default_factory=list)
    kwargs: dict = field(default_factory=dict)
    is_local: bool = field(default=False)

    def to_string(self):
        if self.is_local:
            if self.file_path:
                raise ValueError("Nested query from file couldn't be local")

            if self.expression:
                return self.expression
            else:
                return self.query
        else:
            return "::".join(i for i in [self.file_path, self.query] if i)


@dataclass
class EP:
    name: str
    raw_string: str
    args: list = field(default_factory=list)
    kwargs: dict = field(default_factory=dict)


@dataclass
class IfElseEP(EP):
    if_nodes: Set[str] = field(default_factory=set)
    else_nodes: Set[str] = field(default_factory=set)


@dataclass
class Node:
    ep: Union[EP, NestedQuery, None]
    id: str
    query: str
    tick_type: Optional[str] = field(default=None)
    labels: Dict[str, str] = field(default_factory=dict)
    config: dict = field(default_factory=dict)
    params: dict = field(default_factory=dict)
    sinks: List[str] = field(default_factory=list)
    symbols: list = field(default_factory=list)


@dataclass
class Query:
    name: str
    graph: str
    nodes: Dict[str, Node] = field(default_factory=dict)
    roots: list = field(default_factory=list)
    leaves: list = field(default_factory=list)
    symbols: list = field(default_factory=list)
    config: dict = field(default_factory=dict)
    params: dict = field(default_factory=dict)
    depends: Set[Tuple[Optional[str], Optional[str]]] = field(default_factory=set)

    def get_id(self, prefix: Optional[str] = "cluster"):
        if prefix:
            prefix = f"{prefix}__"
        else:
            prefix = ""

        graph = self.graph.replace(":", "_")

        return f"{prefix}{graph}__{self.name}"


@dataclass
class Graph:
    file_path: str
    config: dict = field(default_factory=dict)
    queries: Dict[str, Query] = field(default_factory=dict)

    def has_query(self, query):
        return query in self.queries


class GraphStorage(dict):
    def get_query(self, otq_file: Optional[str], query_name: Optional[str]) -> Optional[Query]:
        if not query_name or not otq_file:
            return None

        if otq_file not in self or query_name not in self[otq_file].queries:
            return None

        return self[otq_file].queries[query_name]

    def get_query_unique_id(self, query: Union[str, Query, NestedQuery], graph: Optional[str] = None) -> Optional[str]:
        query_obj = None
        if isinstance(query, Query):
            query_obj = query
        elif isinstance(query, NestedQuery):
            if not query.is_local or query.expression:
                raise RuntimeError("Couldn't get id for non-local or expression-based NestedQuery")

            query_obj = self.get_query(query.file_path, query.query)
        elif isinstance(query, str):
            if not graph:
                raise ValueError("`graph` with path to otq file is required for `str` query")

            query_obj = self.get_query(graph, query)
        else:
            raise RuntimeError(f"Unsupported query type: {type(query)}")

        if not query_obj:
            return None
        else:
            return query_obj.get_id()


class GVTable:
    def __init__(self, border=0, cellborder=1, cellspacing=0, attrs: Optional[dict] = None, auto_colspan=True):
        """
        Generate HTML tables for Graphviz

        Attributes for each row or cell can be set by passing `attrs` attribute to `GVTable.row` or `GVTable.cell`.
        Accoringly `attrs` params have reverse prioriry: higher for cells, less for rows.

        Parameters
        ----------
        border: int
            Value of `BORDER` attribute of table HTML element
        cellborder: int
            Value of `CELLBORDER` attribute of table HTML element
        cellspacing: int
            Value of `CELLSPACING` attribute of table HTML element
        attrs: dict
            HTML attributes to apply to table element.
        auto_colspan: bool
            If set True, then last cell in each row automaticly fills table width,
            if `colspan` attribute not set for this cell.

        Examples
        --------

        Simple two rows table:

        >>> table = otp.utils.render.GVTable()
        >>> table.row(["cell_1_1", "cell_1_2"]) # doctest: +SKIP
        >>> table.row(["cell_2_1", "cell_2_2"]) # doctest: +SKIP
        >>> table_html = str(table) # doctest: +SKIP
        """
        self.rows: List[Tuple[List[Tuple[Union[List[str], str], dict]], dict]] = []
        self.attrs = {
            "border": border,
            "cellborder": cellborder,
            "cellspacing": cellspacing,
        }
        if attrs:
            self.attrs.update(attrs)

        self.auto_colspan = auto_colspan
        self.max_cols = 0

    def cell(self, data: list, attrs: Optional[dict] = None):
        """
        Append cell in the last row
        """
        if len(self) == 0:
            raise RuntimeError("No rows in table")

        row, row_attrs = self.rows[-1]

        for cell_data in data:
            cell_attrs = row_attrs.copy()

            if attrs:
                cell_attrs.update(attrs)

            if isinstance(cell_data, tuple):
                cell, _cell_attrs = cell_data
                if _cell_attrs:
                    cell_attrs.update(_cell_attrs)
            else:
                cell = cell_data

            row.append((cell, cell_attrs))

        self.max_cols = max(self.max_cols, len(row))

        return self

    def row(self, data: list, attrs: Optional[dict] = None):
        self.rows.append(([], attrs if attrs else {}))
        self.cell(data)

        return self

    def __len__(self):
        return len(self.rows)

    def __str__(self):
        tags = []

        attrs = " ".join([f"{k.upper()}=\"{v}\"" for k, v in self.attrs.items()])
        tags.append(f"<TABLE {attrs}>")

        for row, row_attrs in self.rows:
            col_count = len(row)
            tags.append("<TR>")

            for i in range(col_count):
                cell, cell_attrs = row[i]

                if (
                    self.auto_colspan and col_count - 1 == i and
                    len(row) < self.max_cols and "colspan" not in cell_attrs
                ):
                    cell_attrs["colspan"] = self.max_cols - col_count + 1

                attrs = " ".join([f"{k.upper()}=\"{v}\"" for k, v in cell_attrs.items()])
                if not isinstance(cell, list):
                    cell = [cell]

                cell_str = "<BR/>".join(cell)
                tags.append(f"<TD {attrs}>{cell_str}</TD>")

            tags.append("</TR>")

        tags.append("</TABLE>")

        return "<" + "".join(tags) + ">"
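# Illustrative note, not part of the packaged file: for the two-row docstring
# example above, str(table) yields a Graphviz HTML-like label roughly of the form
# <<TABLE BORDER="0" CELLBORDER="1" CELLSPACING="0"><TR><TD >cell_1_1</TD><TD >cell_1_2</TD></TR>
# <TR><TD >cell_2_1</TD><TD >cell_2_2</TD></TR></TABLE>>
# which can be used directly as the label of a plaintext-shaped Graphviz node.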


def _posix_path(path: str):
    return path.replace(os.sep, "/")


def _get_node_unique_id(node: Union[Node, str], query: Query):
    if isinstance(node, str):
        node = query.nodes[node]

    return f"{query.get_id(prefix=None)}__{node.id}"


def _save_param(storage, key, value):
    if key in storage:
        if not isinstance(storage[key], list):
            storage[key] = [storage[key]]

        storage[key].append(value)
    else:
        storage[key] = value


def _is_local_query(f_path: Optional[str]) -> bool:
    return f_path in ["THIS", "___ME___"]


def _parse_function_params(func_params: str) -> Tuple[list, dict]:
    def save_param(_key, _value, _args, _kwargs):
        if _key:
            _kwargs[_key.lower()] = (_key, _value)
        else:
            if _kwargs:
                raise RuntimeError("Positional argument could not be after keyword argument")

            _args.append(_value)

    args: list = []
    kwargs: dict = {}
    key = ""
    str_buffer: list = []
    in_quotes = None
    escape_next = False

    if not func_params:
        return args, kwargs

    for ch in func_params:
        if escape_next:
            escape_next = False
            str_buffer.append(ch)
        elif ch == "\\":
            escape_next = True
        elif in_quotes:
            if ch == in_quotes:
                in_quotes = None
            else:
                str_buffer.append(ch)
        else:
            if ch in "\"\'":
                in_quotes = ch
            elif ch.isspace():
                continue
            elif ch == "=":
                key = "".join(str_buffer)
                str_buffer.clear()
            elif ch == ",":
                save_param(key, "".join(str_buffer), args, kwargs)
                str_buffer.clear()
                key = ""
            else:
                str_buffer.append(ch)

    if in_quotes:
        raise ValueError("`func_params` unclosed quote")

    if str_buffer:
        save_param(key, "".join(str_buffer), args, kwargs)
        str_buffer.clear()

    return args, kwargs


def _parse_function(expression: str) -> Tuple[Optional[str], list, dict]:
    m = re.search(r"^(\w*)\s*\(([\s\S]*)\)\s*$", expression)

    if not m:
        return None, [], {}

    ep, params_str = m.groups()
    args, kwargs = _parse_function_params(params_str)

    return ep, args, kwargs


def _get_ep_from_str(ep_string: str) -> Tuple[str, list, dict]:
    ep, args, kwargs = _parse_function(ep_string)

    if not ep:
        ep = ep_string

    return ep, args, kwargs


def _parse_query_path(query_path: str) -> Union[Tuple[str, Optional[str]], List[str]]:
    query_path_splitted = query_path.rsplit("::", maxsplit=1)

    if len(query_path_splitted) == 1:
        return _posix_path(query_path_splitted[0]), None
    else:
        file_path, query = query_path_splitted

        return _posix_path(file_path), query
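# Illustrative sketch, not part of the packaged file: the helpers above split
# OneTick-style references such as "queries/book.otq::TOP_OF_BOOK" (a hypothetical
# path used only for this example) into a file path and a query name, e.g.
# _parse_query_path("queries/book.otq::TOP_OF_BOOK") -> ("queries/book.otq", "TOP_OF_BOOK"),
# while a bare "queries/book.otq" -> ("queries/book.otq", None); file parts equal to
# "THIS" or "___ME___" are treated by _is_local_query() as references to the current file.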


def _parse_ep(ep_string: str, parse_eval_from_params: bool = False) -> Union[EP, NestedQuery]:
    if ep_string.startswith("NESTED_OTQ"):
        query_path = " ".join(ep_string.split(" ")[1:])

        file_path, query = _parse_query_path(query_path)
        is_local = _is_local_query(file_path)

        return NestedQuery(
            name="NESTED_OTQ", raw_string=ep_string, query=query, file_path=None if is_local else file_path,
            is_local=is_local,
        )

    ep, args, kwargs = _get_ep_from_str(ep_string)

    if parse_eval_from_params:
        for param_name, param_value in kwargs.items():
            m = re.search(r"^(eval\([^)]+\)).*$", param_value[1], re.IGNORECASE)
            if not m:
                continue

            param_ep_str = m.group(1)

            try:
                param_ep = _parse_ep(param_ep_str, parse_eval_from_params=False)
                if isinstance(param_ep, NestedQuery):
                    kwargs[param_name] = (param_value[0], param_ep)
            except Exception:
                pass

    if ep.lower() in EPS_WITH_QUERIES:
        ep_description = EPS_WITH_QUERIES[ep.lower()]
        args_idx, kwargs_key = ep_description

        is_query_found = True

        if kwargs_key in kwargs:
            query_path = kwargs.pop(kwargs_key)[1]
        elif 0 <= args_idx < len(args):
            query_path = args.pop(args_idx)
        else:
            # don't do anything, just process as EP
            is_query_found = False

        if is_query_found:
            if query_path[0] in ["\"", "\'"] and query_path[0] == query_path[-1]:
                query_path = query_path[1:-1]

            file_path, query = _parse_query_path(query_path)

            if file_path and query:
                is_local = _is_local_query(file_path)
                return NestedQuery(
                    name=ep, raw_string=ep_string, query=query, file_path=None if is_local else file_path,
                    args=args, kwargs=kwargs, is_local=is_local,
                )
            else:
                return NestedQuery(
                    name=ep, raw_string=ep_string, expression=file_path, args=args, kwargs=kwargs, is_local=True,
                )

    if ep in IF_ELSE_EPS:
        return IfElseEP(name=ep, raw_string=ep_string, args=args, kwargs=kwargs)

    return EP(name=ep, raw_string=ep_string, args=args, kwargs=kwargs)


def _parse_security(value: str) -> Tuple[Union[str, EP, NestedQuery], str, bool]:
    is_security_active = True
    split_value = value.split()

    try:
        int(split_value[-1])
    except ValueError:
        # assume that third value is "No"
        is_security_active = False
        split_value.pop()

    security = " ".join(split_value[:-1])

    try:
        security_ep = _parse_ep(security)
    except ValueError:
        security_ep = None

    return (security_ep if security_ep else security), split_value[-1], is_security_active


def _move_between_dicts(source, output, key, func):
    if not isinstance(source[key], list):
        source[key] = [source[key]]

    output.update([func(k.split()) for k in source[key]])
    del source[key]


def _move_parameters(_from, _to):
    if "PARAMETER" in _from:
        _move_between_dicts(
            _from,
            _to,
            "PARAMETER",
            lambda x: (x[0], " ".join(x[1:]))
        )

    if "PARAMETER_MANDATORY" in _from:
        _move_between_dicts(
            _from,
            _to,
            "PARAMETER_MANDATORY",
            lambda x: (x[0], None)
        )


def _build_query_tree(query: Query):
    roots = {*query.nodes.keys()}
    leaves = {*query.nodes.keys()}

    for node_id, node in query.nodes.items():
        _move_parameters(node.config, node.params)

        # save labels
        if "NESTED_INPUT" in node.config:
            node.labels["IN"] = node.config["NESTED_INPUT"]

        if "NESTED_OUTPUT" in node.config:
            node.labels["OUT"] = node.config["NESTED_OUTPUT"]

        if "SOURCE_DESCRIPTION" in node.config:
            description = node.config["SOURCE_DESCRIPTION"].strip().split(" ")
            if len(description) > 1:
                desc_node = description[0].split(".")[0]
                labels = description[1].split(".")

                if labels and desc_node in query.nodes:
                    if labels[0]:
                        query.nodes[desc_node].labels["IN"] = labels[0]

                    if labels[1]:
                        query.nodes[desc_node].labels["OUT"] = labels[1]

        if "SINK_DESCRIPTION" in node.config:
            description_path = node.config["SOURCE_DESCRIPTION"].strip().split(".")
            if len(description_path) > 1:
                desc_node = description_path[0]
                label = description_path[-1]

                if label and desc_node in query.nodes:
                    query.nodes[desc_node].labels["OUT"] = label

        nodes = []

        if "SINK" in node.config:
            sink_nodes = [(sink, True) for sink in node.config["SINK"].strip().split()]
            if sink_nodes:
                leaves.remove(node_id)

            nodes += sink_nodes
            del node.config["SINK"]

        if "SOURCE" in node.config:
            source_nodes = [(sink, False) for sink in node.config["SOURCE"].strip().split()]
            if source_nodes:
                roots.remove(node_id)

            nodes += source_nodes
            del node.config["SOURCE"]

        for source_node, is_sink_node in nodes:
            # just ignore other nodes in path
            source_node_path = source_node.split(".")
            source_node_id = source_node_path[0]

            if not is_sink_node and source_node_id in leaves:
                leaves.remove(source_node_id)

            if is_sink_node and source_node_id in roots:
                roots.remove(source_node_id)

            if source_node_id not in query.nodes:
                raise RuntimeError(f"Malformed otq file passed: node {source_node_id} not found in {query.name}")

            if is_sink_node:
                query.nodes[node_id].sinks.append(source_node_id)

                if isinstance(node.ep, IfElseEP):
                    if "IF" in source_node_path[1:]:
                        node.ep.if_nodes.add(source_node_id)

                    if "ELSE" in source_node_path[1:]:
                        node.ep.else_nodes.add(source_node_id)
            else:
                source_node = query.nodes[source_node_id]
                source_node.sinks.append(node_id)

                if isinstance(source_node.ep, IfElseEP):
                    if "IF" in source_node_path[1:]:
                        source_node.ep.if_nodes.add(node_id)

                    if "ELSE" in source_node_path[1:]:
                        source_node.ep.else_nodes.add(node_id)

    query.roots = list(roots)
    query.leaves = list(leaves)


def _save_dependency(obj, query: Query):
    if isinstance(obj, (EP, NestedQuery)):
        for kwarg_param in obj.kwargs.values():
            if isinstance(kwarg_param[1], NestedQuery):
                _save_dependency(kwarg_param[1], query)

    if isinstance(obj, NestedQuery) and not obj.expression:
        query.depends.add((obj.file_path, obj.query))


def _finalize_query(query: Query, graph: Graph):
    if not query:
        return

    if query.name == "_meta":
        graph.config = {k.upper(): v for k, v in query.config.items()}
        return

    if "SECURITY" in query.config:
        if not isinstance(query.config["SECURITY"], list):
            query.config["SECURITY"] = [query.config["SECURITY"]]

        for security in query.config["SECURITY"]:
            parsed_security = _parse_security(security)
            query.symbols.append(parsed_security)
            _save_dependency(parsed_security[0], query)

        del query.config["SECURITY"]

    _move_parameters(query.config, query.params)
    _build_query_tree(query)

    graph.queries[query.name] = query


def read_otq(path: str, parse_eval_from_params: bool = False) -> Optional[Graph]:
    if path.startswith("remote://") or not os.path.exists(path):
        return None

    graph = Graph(path)
    current_query = None

    with open(path, "r") as input_otq:
        tmp_line = ""
        for line in input_otq:
            line = line.rstrip()

            if line.endswith("\\"):
                tmp_line += f"{line[:-1]}\n"
                continue
            else:
                tmp_line += line

            line = tmp_line.strip()
            tmp_line = ""

            if not line:
                continue

            # found new query
            m = re.search(r"^\[(.*)\]$", line)
            if m:
                if current_query and current_query.config['TYPE'] == 'GRAPH':
                    _finalize_query(current_query, graph)

                query_name = m.groups()[0]
                current_query = Query(name=query_name, graph=path)
                continue

            if not current_query:
                continue

            if line.startswith("NODE"):
                prefix = r"NODE\D*?(\d+)"
            elif line.startswith("ROOT"):
                prefix = r"ROOT\D*?(\d*)"
            else:
                # other query param
                line_expr = line.split("=")
                param, value = line_expr[0], "=".join(line_expr[1:])
                param = param.strip()
                value = value.strip()

                _save_param(current_query.config, param, value)
                continue

            m = re.search(rf"^({prefix})(_([a-zA-Z_]*[0-9]*))?\s*=\s*([\s\S]*)$", line)
            if m:
                node_id, _, _, node_param, value = m.groups()

                if node_id not in current_query.nodes:
                    current_query.nodes[node_id] = Node(ep=None, id=node_id, query=current_query.name)

                if not node_param:
                    ep = _parse_ep(value, parse_eval_from_params=parse_eval_from_params)
                    _save_dependency(ep, current_query)
                    current_query.nodes[node_id].ep = ep
                elif node_param == "BIND_SECURITY":
                    security = _parse_security(value)
                    current_query.nodes[node_id].symbols.append(security)
                    _save_dependency(security[0], current_query)
                elif node_param == "TICK_TYPE":
                    current_query.nodes[node_id].tick_type = value
                else:
                    _save_param(current_query.nodes[node_id].config, node_param, value)

    if current_query:
        _finalize_query(current_query, graph)

    return graph


def filter_nodes(ep: Any, param, value, line_limit: Optional[Tuple[int, int]] = None):
    if line_limit is not None and (line_limit[0] < 0 or line_limit[1] < 0):
        raise ValueError("line_limit values should not be negative")

    if isinstance(ep, EP) and line_limit is not None and (
        ep.name == "PER_TICK_SCRIPT" and param.lower() == "script" or
        ep.name == "CSV_FILE_LISTING" and param.lower() == "file_contents"
    ):
        lines = [
            line if len(line) <= line_limit[1] or not line_limit[1] else line[:line_limit[1]] + "..."
            for line in value.split("\n")
        ]

        if line_limit[0] and len(lines) > line_limit[0]:
            lines = lines[:line_limit[0]] + ["..."]

        return "\n".join(lines)

    return value


def build_symbols(symbols, gr_nested, gr_static, graphs: GraphStorage, graph_node, reverse=False):
    table = GVTable()

    for symbol_data in symbols:
        symbol, _, _ = symbol_data

        if isinstance(symbol, NestedQuery):
            if symbol.query:
                if symbol.is_local:
                    # reversed directions here brakes everything
                    gr_nested.edge(
                        f"cluster__{symbol.query}__footer",
                        f"{graph_node}:symbols",
                        ltail=f"cluster__{symbol.query}",
                        style="dashed", constraint="false",
                    )
                    continue

                nested_cluster_id = graphs.get_query_unique_id(symbol.query, symbol.file_path)

                if nested_cluster_id:
                    gr_nested.edge(
                        f"{nested_cluster_id}__footer",
                        f"{graph_node}:symbols",
                        ltail=nested_cluster_id,
                        style="dashed", constraint="false",
                    )
                    continue

            query = symbol.to_string()
        elif isinstance(symbol, EP):
            query = symbol.raw_string
        else:
            query = symbol

        table.row([query])

    if len(table):
        gr_static.node(f"{graph_node}__symbols", str(table))
        gr_static.edge(
            f"{graph_node}__symbols" if not reverse else f"{graph_node}:symbols",
            f"{graph_node}:symbols" if not reverse else f"{graph_node}__symbols",
            style="dashed", constraint="false" if not reverse else "true",
        )


def build_node(graphs: GraphStorage, node: Node, line_limit: Optional[Tuple[int, int]] = None):
    if node.ep is None:
        raise ValueError(f"EP of node {node.id} could not be None")

    table = GVTable()

    if "IN" in node.labels:
        table.row([
            ("<FONT POINT-SIZE=\"10\">" + html.escape(node.labels["IN"]) + "</FONT>", {"port": "in"}),
        ], attrs={
            "border": "1", "fixedsize": "TRUE", "colspan": "3",
        })

    table.row([(node.ep.name, {"port": "ep"})], attrs={"bgcolor": "gray95"})

    if node.tick_type:
        table.cell([node.tick_type])

    if node.symbols:
        table.cell([("[■]", {"port": "symbols"})])

    if node.ep and (node.ep.args or node.ep.kwargs):
        params: List[Tuple[Optional[str], Union[str, NestedQuery]]] = \
            [(None, v) for v in node.ep.args] + list(node.ep.kwargs.values())

        for idx, data in enumerate(params):
            k, v = data
            attrs = {"port": k}
            if idx == len(params) - 1:
                attrs["sides"] = "LRB"
            else:
                attrs["sides"] = "LR"

            if isinstance(v, NestedQuery):
                param_value = html.escape(v.raw_string)
            else:
                param_value = html.escape(v).replace("\t", " " * 4)

            param_lines = filter_nodes(node.ep, k, param_value, line_limit).split("\n")

            if k:
                if len(param_lines) == 1:
                    param_lines[0] = f"{html.escape(k)}={param_lines[0]}"
                else:
                    param_lines = [f"{html.escape(k)}="] + param_lines

            if len(param_lines) > 1:
                # Add idents disable default horizontal central align
                # if there are multiline parameter for EP.
                # Align change affects all parameters for EP.
                for i in range(len(param_lines)):
                    if i > 0:
                        param_lines[i] = " " * 2 + param_lines[i]

                attrs.update({"ALIGN": "LEFT", "BALIGN": "LEFT"})

            table.row([param_lines], attrs=attrs)

    if node.params:
        table.row([[f"{html.escape(k)}={html.escape(v)}" for k, v in node.params.items()]])

    if isinstance(node.ep, NestedQuery):
        if not (node.ep.is_local and node.ep.query or graphs.get_query(node.ep.file_path, node.ep.query)):
            table.row([node.ep.to_string()])

    if isinstance(node.ep, IfElseEP):
        table.row([
            ("<FONT POINT-SIZE=\"10\">[IF]</FONT>", {"port": "if"}), ("", {"border": "0"}),
            ("<FONT POINT-SIZE=\"10\">[ELSE]</FONT>", {"port": "else"})
        ], attrs={
            "border": "1", "fixedsize": "TRUE", "colspan": "1",
        })
    elif "OUT" in node.labels:
        table.row([
            ("<FONT POINT-SIZE=\"10\">" + html.escape(node.labels["OUT"]) + "</FONT>", {"port": "out"}),
        ], attrs={
            "border": "1", "fixedsize": "TRUE", "colspan": "3",
        })

    return str(table)


def _parse_time(time_str: str) -> str:
    if time_str:
        try:
            time_str = datetime.strptime(time_str, "%Y%m%d%H%M%S%f").strftime("%Y/%m/%d %H:%M:%S.%f"[:-3])
        except ValueError:
            pass
    else:
        time_str = "--"

    return time_str


def _build_time_expr(table: GVTable, name: str, time_expr: str):
    attrs = {}
    time_expr = html.escape(time_expr)

    if len(time_expr) > 60:
        time_expr = f"<FONT POINT-SIZE=\"10\">{time_expr}</FONT>"
        attrs["cellpadding"] = "4"

    table.row([name], attrs={"bgcolor": "gray95"}).row([time_expr], attrs=attrs)


def _get_nested_query(nested_query: NestedQuery, local_graph: Graph, graphs: GraphStorage) -> Optional[Query]:
    if nested_query.query:
        if nested_query.is_local:
            return local_graph.queries[nested_query.query]
        else:
            return graphs.get_query(nested_query.file_path, nested_query.query)

    return None


def _render_graph(
    gr_root, gr, graphs: GraphStorage, graph_name: str, queries: set, line_limit: Optional[Tuple[int, int]] = None,
):
    graph = graphs[graph_name]

    if not queries or queries == {"*"}:
        queries = set(graph.queries.keys())

    for query_name in queries:
        if query_name not in graph.queries:
            continue

        query = graph.queries[query_name]
        query_id = query.get_id()

        with gr.subgraph(name=query_id, node_attr={"shape": "plaintext"}) as gr_sub:
            gr_sub.attr(label=query_name)

            start_time = _parse_time(query.config.get("START", graph.config.get("START")))
            end_time = _parse_time(query.config.get("END", graph.config.get("END")))

            start_expression = query.config.get("START_EXPRESSION", graph.config.get("START_EXPRESSION"))
            end_expression = query.config.get("END_EXPRESSION", graph.config.get("END_EXPRESSION"))

            tz_data = query.config.get("TZ", graph.config.get("TZ"))
            if not tz_data:
                tz_data = "--"

            table = GVTable().row([
                "START_TIME", "END_TIME", "TZ"
            ], attrs={"bgcolor": "gray95"}).row([
                start_time, end_time, tz_data,
            ])

            if start_expression:
                _build_time_expr(table, "START_EXPRESSION", start_expression)

            if end_expression:
                _build_time_expr(table, "END_EXPRESSION", end_expression)

            table.row([
                ("PARAMETERS", {"port": "params"}),
                ("SYMBOLS", {"port": "symbols"}),
            ], attrs={"bgcolor": "gray95"})

            footer_id = f"{query_id}__footer"
            gr_sub.node(footer_id, str(table), labelloc="c")

            # Put footer to the bottom and, most times, to the center
            for node_id in query.leaves:
                gr_sub.edge(_get_node_unique_id(node_id, query), footer_id, style="invis")

            if query.params:
                gr_sub.node(
                    f"{query_id}__params",
                    str(GVTable().row([[
                        f"{html.escape(k)}" + (f" = {html.escape(v)}" if v else "") for k, v in query.params.items()
                    ]]))
                )

                gr_sub.edge(
                    f"{footer_id}:params", f"{query_id}__params",
                    style="dashed", constraint="true",
                )

            if query.symbols:
                build_symbols(query.symbols, gr, gr_sub, graphs, f"{query_id}__footer", reverse=True)

            for node_id, node in query.nodes.items():
                node_unique_id = _get_node_unique_id(node, query)
                gr_sub.node(node_unique_id, build_node(graphs, node, line_limit), group=query_name)

                for sink in node.sinks:
                    if "OUT" in node.labels:
                        output_port = ":out"
                    else:
                        output_port = ""

                    if isinstance(node.ep, IfElseEP):
                        if sink in node.ep.else_nodes:
                            output_port = ":else"
                        else:
                            output_port = ":if"

                    sink_node = query.nodes[sink]
                    if "IN" in sink_node.labels:
                        sink_port = ":in"
                    else:
                        sink_port = ""

                    gr_sub.edge(
                        f"{node_unique_id}{output_port}", f"{_get_node_unique_id(sink_node, query)}{sink_port}",
                    )

                for param_name, param_value in node.ep.kwargs.items():
                    if isinstance(param_value[1], NestedQuery):
                        nested_cluster = _get_nested_query(param_value[1], graph, graphs)
                        if not nested_cluster:
                            continue

                        gr_root.edge(
                            f"{node_unique_id}:{param_name}",
                            _get_node_unique_id(nested_cluster.roots[0], nested_cluster),
                            lhead=nested_cluster.get_id(),
                            style="dashed", dir="both",
                        )

                if node.symbols:
                    build_symbols(node.symbols, gr, gr_sub, graphs, node_unique_id)

                if isinstance(node.ep, NestedQuery):
                    nested_cluster = _get_nested_query(node.ep, graph, graphs)
                    if not nested_cluster:
                        continue

                    gr_root.edge(
                        node_unique_id,
                        _get_node_unique_id(nested_cluster.roots[0], nested_cluster),
                        lhead=nested_cluster.get_id(),
                        style="dashed", dir="both",
                    )


def render_otq(
    path: Union[str, List[str]],
    image_path: Optional[str] = None,
    output_format: Optional[str] = None,
    load_external_otqs: bool = True,
    view: bool = False,
    line_limit: Optional[Tuple[int, int]] = (10, 30),
    parse_eval_from_params: bool = False,
    debug: bool = False,
) -> str:
    """
    Render queries from .otq files.

    Parameters
    ----------
    path: str, List[str]
        Path to .otq file or list of paths to multiple .otq files.
        Needed to render query could be specified with the next format: `path_to_otq::query_name`
    image_path: str, None
        Path for generated image. If omitted, image will be saved in a temp dir
    output_format: str, None
        `Graphviz` rendering format. Default: `png`.
        If `image_path` contains one of next extensions, `output_format` will be set automatically: `png`, `svg`, `dot`.
    load_external_otqs: bool
        If set to `True` (default) dependencies from external .otq files (not listed in `path` param)
        will be loaded automatically.
    view: bool
        Defines should generated image be showed after render.
    line_limit: Tuple[int, int], None
        Limit for maximum number of lines and length of some EP parameters strings.
        First param is limit of lines, second - limit of characters in each line.
        If set to None limit disabled.
        If one of tuple values set to zero the corresponding limit disabled.
    parse_eval_from_params: bool
        Enable parsing and printing `eval` sub-queries from EP parameters.
    debug: bool
        Allow to print stdout or stderr from `Grapthviz` render.

    Examples
    --------

    Render single file:

    >>> otp.utils.render_otq("./test.otq") # doctest: +SKIP

    Render multiple files:

    >>> otp.utils.render_otq(["./first.otq", "./second.otq"]) # doctest: +SKIP

    Render specific queries from multiple files:

    >>> otp.utils.render_otq(["./first.otq", "./second.otq::some_query"]) # doctest: +SKIP
    """
    if not isinstance(path, list):
        path = [path]

    path = [_posix_path(p) for p in path]

    graphs = GraphStorage()

    queries_to_render: Dict[str, Set[str]] = defaultdict(set)
    path_files: List[str] = []

    for otq_path in path:
        query_file, query_name = _parse_query_path(otq_path)

        path_files.append(query_file)

        if queries_to_render[query_file] == {"*"}:
            continue

        if query_name:
            queries_to_render[query_file].add(query_name)
        else:
            queries_to_render[query_file] = {"*"}

    otq_files: Deque[str] = deque(path_files)

    while otq_files:
        otq_path = otq_files.popleft()

        graph = read_otq(otq_path, parse_eval_from_params=parse_eval_from_params)

        if not graph:
            continue

        graphs[otq_path] = graph

        for graph_query in graph.queries.values():
            for dep_file, dep_query in graph_query.depends:
                if dep_file is None:
                    dep_file = otq_path

                if dep_file not in graphs and load_external_otqs:
                    otq_files.append(dep_file)

                if queries_to_render[dep_file] == {"*"}:
                    continue

                if (
                    load_external_otqs and dep_file != otq_path or
                    dep_file == otq_path and graph_query.name in queries_to_render[otq_path]
                ):
                    if dep_query:
                        queries_to_render[dep_file].add(dep_query)
                    else:
                        queries_to_render[dep_file] = {"*"}

    if image_path:
        extension = Path(image_path).suffix

        if extension:
            extension = extension[1:]

            if extension == output_format or output_format is None and extension in {"png", "svg", "dot"}:
                image_path = str(Path(image_path).with_suffix(""))

                if output_format is None:
                    output_format = extension

    if not output_format:
        output_format = "png"

    if not image_path:
        image_path = TmpFile().path

    gr = gv.Digraph(format=output_format, filename=image_path, engine="dot")
    gr.attr("graph", compound="true")
    gr.attr("node", shape="plaintext", margin="0")

    idx = 0
    for otq_path, graph in graphs.items():
        if not queries_to_render[otq_path]:
            continue

        with gr.subgraph(name=f"cluster__graph__{idx}", node_attr={"shape": "plaintext"}) as gr_otq:
            gr_otq.attr(label=otq_path)
            gr_otq.attr(margin="16")
            _render_graph(gr, gr_otq, graphs, otq_path, queries_to_render[otq_path], line_limit)

        idx += 1

    return gr.render(view=view, quiet=not debug)