onetick-py 1.177.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- locator_parser/__init__.py +0 -0
- locator_parser/acl.py +73 -0
- locator_parser/actions.py +262 -0
- locator_parser/common.py +368 -0
- locator_parser/io.py +43 -0
- locator_parser/locator.py +150 -0
- onetick/__init__.py +101 -0
- onetick/doc_utilities/__init__.py +3 -0
- onetick/doc_utilities/napoleon.py +40 -0
- onetick/doc_utilities/ot_doctest.py +140 -0
- onetick/doc_utilities/snippets.py +279 -0
- onetick/lib/__init__.py +4 -0
- onetick/lib/instance.py +141 -0
- onetick/py/__init__.py +293 -0
- onetick/py/_stack_info.py +89 -0
- onetick/py/_version.py +2 -0
- onetick/py/aggregations/__init__.py +11 -0
- onetick/py/aggregations/_base.py +648 -0
- onetick/py/aggregations/_docs.py +948 -0
- onetick/py/aggregations/compute.py +286 -0
- onetick/py/aggregations/functions.py +2216 -0
- onetick/py/aggregations/generic.py +104 -0
- onetick/py/aggregations/high_low.py +80 -0
- onetick/py/aggregations/num_distinct.py +83 -0
- onetick/py/aggregations/order_book.py +501 -0
- onetick/py/aggregations/other.py +1014 -0
- onetick/py/backports.py +26 -0
- onetick/py/cache.py +374 -0
- onetick/py/callback/__init__.py +5 -0
- onetick/py/callback/callback.py +276 -0
- onetick/py/callback/callbacks.py +131 -0
- onetick/py/compatibility.py +798 -0
- onetick/py/configuration.py +771 -0
- onetick/py/core/__init__.py +0 -0
- onetick/py/core/_csv_inspector.py +93 -0
- onetick/py/core/_internal/__init__.py +0 -0
- onetick/py/core/_internal/_manually_bound_value.py +6 -0
- onetick/py/core/_internal/_nodes_history.py +250 -0
- onetick/py/core/_internal/_op_utils/__init__.py +0 -0
- onetick/py/core/_internal/_op_utils/every_operand.py +9 -0
- onetick/py/core/_internal/_op_utils/is_const.py +10 -0
- onetick/py/core/_internal/_per_tick_scripts/tick_list_sort_template.script +121 -0
- onetick/py/core/_internal/_proxy_node.py +140 -0
- onetick/py/core/_internal/_state_objects.py +2312 -0
- onetick/py/core/_internal/_state_vars.py +93 -0
- onetick/py/core/_source/__init__.py +0 -0
- onetick/py/core/_source/_symbol_param.py +95 -0
- onetick/py/core/_source/schema.py +97 -0
- onetick/py/core/_source/source_methods/__init__.py +0 -0
- onetick/py/core/_source/source_methods/aggregations.py +809 -0
- onetick/py/core/_source/source_methods/applyers.py +296 -0
- onetick/py/core/_source/source_methods/columns.py +141 -0
- onetick/py/core/_source/source_methods/data_quality.py +301 -0
- onetick/py/core/_source/source_methods/debugs.py +272 -0
- onetick/py/core/_source/source_methods/drops.py +120 -0
- onetick/py/core/_source/source_methods/fields.py +619 -0
- onetick/py/core/_source/source_methods/filters.py +1002 -0
- onetick/py/core/_source/source_methods/joins.py +1413 -0
- onetick/py/core/_source/source_methods/merges.py +605 -0
- onetick/py/core/_source/source_methods/misc.py +1455 -0
- onetick/py/core/_source/source_methods/pandases.py +155 -0
- onetick/py/core/_source/source_methods/renames.py +356 -0
- onetick/py/core/_source/source_methods/sorts.py +183 -0
- onetick/py/core/_source/source_methods/switches.py +142 -0
- onetick/py/core/_source/source_methods/symbols.py +117 -0
- onetick/py/core/_source/source_methods/times.py +627 -0
- onetick/py/core/_source/source_methods/writes.py +986 -0
- onetick/py/core/_source/symbol.py +205 -0
- onetick/py/core/_source/tmp_otq.py +222 -0
- onetick/py/core/column.py +209 -0
- onetick/py/core/column_operations/__init__.py +0 -0
- onetick/py/core/column_operations/_methods/__init__.py +4 -0
- onetick/py/core/column_operations/_methods/_internal.py +28 -0
- onetick/py/core/column_operations/_methods/conversions.py +216 -0
- onetick/py/core/column_operations/_methods/methods.py +292 -0
- onetick/py/core/column_operations/_methods/op_types.py +160 -0
- onetick/py/core/column_operations/accessors/__init__.py +0 -0
- onetick/py/core/column_operations/accessors/_accessor.py +28 -0
- onetick/py/core/column_operations/accessors/decimal_accessor.py +104 -0
- onetick/py/core/column_operations/accessors/dt_accessor.py +537 -0
- onetick/py/core/column_operations/accessors/float_accessor.py +184 -0
- onetick/py/core/column_operations/accessors/str_accessor.py +1367 -0
- onetick/py/core/column_operations/base.py +1121 -0
- onetick/py/core/cut_builder.py +150 -0
- onetick/py/core/db_constants.py +20 -0
- onetick/py/core/eval_query.py +245 -0
- onetick/py/core/lambda_object.py +441 -0
- onetick/py/core/multi_output_source.py +232 -0
- onetick/py/core/per_tick_script.py +2256 -0
- onetick/py/core/query_inspector.py +464 -0
- onetick/py/core/source.py +1744 -0
- onetick/py/db/__init__.py +2 -0
- onetick/py/db/_inspection.py +1128 -0
- onetick/py/db/db.py +1327 -0
- onetick/py/db/utils.py +64 -0
- onetick/py/docs/__init__.py +0 -0
- onetick/py/docs/docstring_parser.py +112 -0
- onetick/py/docs/utils.py +81 -0
- onetick/py/functions.py +2398 -0
- onetick/py/license.py +190 -0
- onetick/py/log.py +88 -0
- onetick/py/math.py +935 -0
- onetick/py/misc.py +470 -0
- onetick/py/oqd/__init__.py +22 -0
- onetick/py/oqd/eps.py +1195 -0
- onetick/py/oqd/sources.py +325 -0
- onetick/py/otq.py +216 -0
- onetick/py/pyomd_mock.py +47 -0
- onetick/py/run.py +916 -0
- onetick/py/servers.py +173 -0
- onetick/py/session.py +1347 -0
- onetick/py/sources/__init__.py +19 -0
- onetick/py/sources/cache.py +167 -0
- onetick/py/sources/common.py +128 -0
- onetick/py/sources/csv.py +642 -0
- onetick/py/sources/custom.py +85 -0
- onetick/py/sources/data_file.py +305 -0
- onetick/py/sources/data_source.py +1045 -0
- onetick/py/sources/empty.py +94 -0
- onetick/py/sources/odbc.py +337 -0
- onetick/py/sources/order_book.py +271 -0
- onetick/py/sources/parquet.py +168 -0
- onetick/py/sources/pit.py +191 -0
- onetick/py/sources/query.py +495 -0
- onetick/py/sources/snapshots.py +419 -0
- onetick/py/sources/split_query_output_by_symbol.py +198 -0
- onetick/py/sources/symbology_mapping.py +123 -0
- onetick/py/sources/symbols.py +374 -0
- onetick/py/sources/ticks.py +825 -0
- onetick/py/sql.py +70 -0
- onetick/py/state.py +251 -0
- onetick/py/types.py +2131 -0
- onetick/py/utils/__init__.py +70 -0
- onetick/py/utils/acl.py +93 -0
- onetick/py/utils/config.py +186 -0
- onetick/py/utils/default.py +49 -0
- onetick/py/utils/file.py +38 -0
- onetick/py/utils/helpers.py +76 -0
- onetick/py/utils/locator.py +94 -0
- onetick/py/utils/perf.py +498 -0
- onetick/py/utils/query.py +49 -0
- onetick/py/utils/render.py +1374 -0
- onetick/py/utils/script.py +244 -0
- onetick/py/utils/temp.py +471 -0
- onetick/py/utils/types.py +120 -0
- onetick/py/utils/tz.py +84 -0
- onetick_py-1.177.0.dist-info/METADATA +137 -0
- onetick_py-1.177.0.dist-info/RECORD +152 -0
- onetick_py-1.177.0.dist-info/WHEEL +5 -0
- onetick_py-1.177.0.dist-info/entry_points.txt +2 -0
- onetick_py-1.177.0.dist-info/licenses/LICENSE +21 -0
- onetick_py-1.177.0.dist-info/top_level.txt +2 -0
|
@@ -0,0 +1,1374 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import re
|
|
3
|
+
import html
|
|
4
|
+
import textwrap
|
|
5
|
+
import graphviz as gv
|
|
6
|
+
from collections import defaultdict, deque
|
|
7
|
+
from datetime import datetime
|
|
8
|
+
from dataclasses import dataclass, field
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
from typing import Any, Deque, Dict, List, Optional, Set, Tuple, Union
|
|
11
|
+
|
|
12
|
+
from onetick.py.utils import TmpFile
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
# EPs whose parameters reference another OTQ query.
# Maps lowercase EP name -> (positional arg index, keyword arg name):
# the query path is looked up first by keyword, then by position
# (see _parse_ep).
EPS_WITH_QUERIES = {
    "eval": (0, "expression"),
    "eval_expression": (0, "expression"),
    "join_with_query": (0, "otq_query"),
    "nested_otq": (0, "otq_name"),
    "join_with_collection_summary": (1, "otq_query"),
    "modify_state_var_from_query": (1, "otq_query"),
    "create_cache": (1, "otq_file_path"),
    "read_cache": (4, "create_cache_query"),
    "generic_aggregation": (0, "query_name"),
}
|
|
26
|
+
|
|
27
|
+
# EPs with separate IF/ELSE output branches: their SINK/SOURCE descriptions
# carry ".IF"/".ELSE" path components (consumed in _build_query_tree).
IF_ELSE_EPS = {
    "CHARACTER_PRESENT", "CORRECT_TICK_FILTER", "PRIMARY_EXCH", "REGEX_MATCHES", "SKIP_BAD_TICK", "TIME_FILTER",
    "TRD_VS_MID", "TRD_VS_QUOTE", "UPTICK", "VALUE_COMPARE", "VALUE_PRESENT", "VOLUME_LIMIT", "WHERE_CLAUSE",
}
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def _parse_table_fields(line: str) -> list:
|
|
34
|
+
result = line.strip().split(',')
|
|
35
|
+
for idx in range(0, len(result) - 1):
|
|
36
|
+
result[idx] = result[idx] + ','
|
|
37
|
+
|
|
38
|
+
return result
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def _light_function_splitter(line: str, sep=',') -> list:
    """Split ``line`` on top-level occurrences of ``sep``.

    Separators nested inside parentheses or quoted strings are ignored.
    Every returned piece except the last keeps its trailing separator.

    Raises
    ------
    ValueError
        If parentheses or quotes in ``line`` are unbalanced.
    """
    lines = []
    current_line: list = []
    parentheses_stack = 0   # current parenthesis nesting depth
    quotes_stack = 0        # 1 while inside a quoted string, else 0
    lead_quote_type = None  # quote character that opened the current string

    for ch in line:
        if ch == sep and not parentheses_stack and not quotes_stack:
            # top-level separator: close the current piece (keep the sep)
            lines.append(''.join(current_line) + sep)
            current_line = []
            continue

        current_line.append(ch)

        if ch == '(' and not quotes_stack:
            parentheses_stack += 1
            continue

        if ch == ')' and not quotes_stack:
            parentheses_stack -= 1
            if parentheses_stack < 0:
                # closing parenthesis without a matching opening one:
                # stop scanning; the check below raises
                break

        if ch in ["\"", "'"]:
            if lead_quote_type is None:
                lead_quote_type = ch
                quotes_stack = 1
            elif ch == lead_quote_type:
                # only the same quote character closes the string
                lead_quote_type = None
                quotes_stack = 0

    if parentheses_stack != 0:
        raise ValueError(f'Incorrect parentheses count in function: `{line}`')

    if quotes_stack != 0:
        raise ValueError(f'Incorrect quotes count in function: `{line}`')

    # the remainder after the last separator (possibly empty)
    lines.append(''.join(current_line))

    return lines
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
# Per-EP splitters for attributes whose values should be rendered on several
# lines: maps EP name -> {attribute name -> splitter function}.
EP_TO_MULTILINE_ATTRS: dict = {
    "ADD_FIELDS": {
        "set": _light_function_splitter,
    },
    "UPDATE_FIELDS": {
        "set": _light_function_splitter,
    },
    "TABLE": {
        "fields": _parse_table_fields,
    },
    "PASSTHROUGH": {
        "fields": _parse_table_fields,
    },
    "COMPUTE": {
        "compute": _light_function_splitter,
    },
    "DECLARE_STATE_VARIABLES": {
        "variables": _light_function_splitter,
    },
    "RENAME_FIELDS": {
        "rename_fields": _parse_table_fields,
    }
}
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
@dataclass
class NestedQuery:
    """A reference from one EP to another OTQ query (possibly in another file)."""
    name: str        # name of the EP that carries the reference
    raw_string: str  # original unparsed EP string
    # query name inside the otq file (None when only an expression is known)
    query: Optional[str] = field(default=None)
    # inline expression used instead of a query name
    expression: Optional[str] = field(default=None)
    # path to the otq file; None for local (THIS/___ME___) references
    file_path: Optional[str] = field(default=None)
    args: list = field(default_factory=list)
    kwargs: dict = field(default_factory=dict)
    is_local: bool = field(default=False)

    def to_string(self):
        """Render the reference: local name/expression or ``file::query``."""
        if self.is_local:
            if self.file_path:
                raise ValueError("Nested query from file couldn't be local")

            if self.expression:
                return self.expression
            else:
                return self.query
        else:
            # skip empty parts so a missing file or query doesn't leave "::"
            return "::".join(i for i in [self.file_path, self.query] if i)
|
|
131
|
+
|
|
132
|
+
|
|
133
|
+
@dataclass
class Config:
    """Rendering options for the output graph."""
    height: int = field(default=0)   # image height; 0 presumably means "not set" — TODO confirm
    width: int = field(default=0)    # image width; 0 presumably means "not set" — TODO confirm
    render_debug_info: bool = field(default=False)  # include extra debug details when rendering
    constraint_edges: str = field(default="true")   # graphviz edge `constraint` attribute value
|
|
139
|
+
|
|
140
|
+
|
|
141
|
+
@dataclass
class EP:
    """A parsed event processor: its name and parsed call arguments."""
    name: str
    raw_string: str  # original unparsed EP string
    args: list = field(default_factory=list)    # positional parameters (strings)
    # lowercased param name -> (original-case name, value); see _parse_function_params
    kwargs: dict = field(default_factory=dict)
|
|
147
|
+
|
|
148
|
+
|
|
149
|
+
@dataclass
class IfElseEP(EP):
    """An EP with distinct IF/ELSE output branches (names listed in IF_ELSE_EPS)."""
    if_nodes: Set[str] = field(default_factory=set)    # ids of nodes connected via the IF branch
    else_nodes: Set[str] = field(default_factory=set)  # ids of nodes connected via the ELSE branch
|
|
153
|
+
|
|
154
|
+
|
|
155
|
+
@dataclass
class Node:
    """A single node of a query graph parsed from an otq file."""
    ep: Union[EP, NestedQuery, None]  # parsed EP of this node
    id: str                           # node id within the query
    query: str                        # name of the query the node belongs to
    tick_type: Optional[str] = field(default=None)
    labels: Dict[str, str] = field(default_factory=dict)  # "IN"/"OUT" pin labels
    config: dict = field(default_factory=dict)            # remaining raw key/value config entries
    params: dict = field(default_factory=dict)            # PARAMETER/PARAMETER_MANDATORY entries
    sinks: List[str] = field(default_factory=list)        # ids of downstream (sink) nodes
    symbols: list = field(default_factory=list)
|
|
166
|
+
|
|
167
|
+
|
|
168
|
+
@dataclass
class Query:
    """A single named query parsed from an otq file."""
    name: str
    graph: str
    nodes: Dict[str, Node] = field(default_factory=dict)
    roots: list = field(default_factory=list)
    leaves: list = field(default_factory=list)
    symbols: list = field(default_factory=list)
    config: dict = field(default_factory=dict)
    params: dict = field(default_factory=dict)
    depends: Set[Tuple[Optional[str], Optional[str]]] = field(default_factory=set)

    def get_id(self, prefix: Optional[str] = "cluster"):
        """Return a graphviz-safe unique id: ``[<prefix>__]<graph>__<name>``.

        Colons in the graph path are replaced so the id is a valid
        graphviz identifier; pass ``prefix=None`` to omit the prefix.
        """
        head = f"{prefix}__" if prefix else ""
        sanitized_graph = self.graph.replace(":", "_")
        return f"{head}{sanitized_graph}__{self.name}"
|
|
189
|
+
|
|
190
|
+
|
|
191
|
+
@dataclass
class Graph:
    """All queries parsed from a single otq file."""
    file_path: str
    config: dict = field(default_factory=dict)  # file-level config (from the "_meta" query)
    queries: Dict[str, Query] = field(default_factory=dict)

    def has_query(self, query):
        """Return True if a query with this name was parsed from the file."""
        return query in self.queries
|
|
199
|
+
|
|
200
|
+
|
|
201
|
+
class GraphStorage(dict):
    """Mapping of otq file path -> parsed :class:`Graph`."""

    def get_query(self, otq_file: Optional[str], query_name: Optional[str]) -> Optional[Query]:
        """Return the parsed query, or None when the file or query is unknown."""
        if not query_name or not otq_file:
            return None

        if otq_file not in self or query_name not in self[otq_file].queries:
            return None

        return self[otq_file].queries[query_name]

    def get_query_unique_id(self, query: Union[str, Query, NestedQuery], graph: Optional[str] = None) -> Optional[str]:
        """Resolve ``query`` and return its unique graphviz id, or None.

        ``query`` may be a parsed :class:`Query`, a local non-expression
        :class:`NestedQuery`, or a query name string (in which case ``graph``
        must be the otq file path).

        Raises
        ------
        RuntimeError
            For a non-local or expression-based NestedQuery, or an
            unsupported ``query`` type.
        ValueError
            For a string ``query`` without a ``graph`` path.
        """
        query_obj = None
        if isinstance(query, Query):
            query_obj = query
        elif isinstance(query, NestedQuery):
            if not query.is_local or query.expression:
                raise RuntimeError("Couldn't get id for non-local or expression-based NestedQuery")

            # NOTE(review): local NestedQuery objects carry file_path=None,
            # so this lookup appears to always return None for them —
            # confirm whether that is intended.
            query_obj = self.get_query(query.file_path, query.query)
        elif isinstance(query, str):
            if not graph:
                raise ValueError("`graph` with path to otq file is required for `str` query")

            query_obj = self.get_query(graph, query)
        else:
            raise RuntimeError(f"Unsupported query type: {type(query)}")

        if not query_obj:
            return None
        else:
            return query_obj.get_id()
|
|
232
|
+
|
|
233
|
+
|
|
234
|
+
class GVTable:
    def __init__(self, border=0, cellborder=1, cellspacing=0, attrs: Optional[dict] = None, auto_colspan=True):
        """
        Generate HTML tables for Graphviz

        Attributes for each row or cell can be set by passing `attrs` attribute to `GVTable.row` or `GVTable.cell`.
        Accordingly `attrs` params have reverse priority: higher for cells, less for rows.

        Parameters
        ----------
        border: int
            Value of `BORDER` attribute of table HTML element
        cellborder: int
            Value of `CELLBORDER` attribute of table HTML element
        cellspacing: int
            Value of `CELLSPACING` attribute of table HTML element
        attrs: dict
            HTML attributes to apply to table element.
        auto_colspan: bool
            If set True, then last cell in each row automatically fills table width,
            if `colspan` attribute not set for this cell.

        Examples
        --------

        Simple two rows table:

        >>> table = otp.utils.render.GVTable()
        >>> table.row(["cell_1_1", "cell_1_2"]) # doctest: +SKIP
        >>> table.row(["cell_2_1", "cell_2_2"]) # doctest: +SKIP
        >>> table_html = str(table) # doctest: +SKIP
        """
        # each entry is (list of (cell content, cell attrs), row attrs)
        self.rows: List[Tuple[List[Tuple[Union[List[str], str], dict]], dict]] = []
        self.attrs = {
            "border": border,
            "cellborder": cellborder,
            "cellspacing": cellspacing,
        }
        if attrs:
            self.attrs.update(attrs)

        self.auto_colspan = auto_colspan
        self.max_cols = 0  # widest row seen so far; used by auto_colspan

    def cell(self, data: list, attrs: Optional[dict] = None):
        """
        Append cell in the last row
        """
        if len(self) == 0:
            raise RuntimeError("No rows in table")

        row, row_attrs = self.rows[-1]

        for cell_data in data:
            # row attrs are the base; per-call and per-cell attrs override them
            cell_attrs = row_attrs.copy()

            if attrs:
                cell_attrs.update(attrs)

            if isinstance(cell_data, tuple):
                # a (content, attrs) pair: per-cell attrs win
                cell, _cell_attrs = cell_data
                if _cell_attrs:
                    cell_attrs.update(_cell_attrs)
            else:
                cell = cell_data

            row.append((cell, cell_attrs))

        self.max_cols = max(self.max_cols, len(row))

        return self

    def row(self, data: list, attrs: Optional[dict] = None):
        """Append a new row filled with ``data`` cells; returns self for chaining."""
        self.rows.append(([], attrs if attrs else {}))
        self.cell(data)

        return self

    def __len__(self):
        # number of rows
        return len(self.rows)

    def __str__(self):
        """Render as a graphviz HTML-like label (table markup wrapped in ``<...>``)."""
        tags = []

        attrs = " ".join([f"{k.upper()}=\"{v}\"" for k, v in self.attrs.items()])
        tags.append(f"<TABLE {attrs}>")

        for row, row_attrs in self.rows:
            col_count = len(row)
            tags.append("<TR>")

            for i in range(col_count):
                cell, cell_attrs = row[i]

                # stretch the last cell of a short row to the full table width
                if (
                    self.auto_colspan and col_count - 1 == i and
                    len(row) < self.max_cols and "colspan" not in cell_attrs
                ):
                    cell_attrs["colspan"] = self.max_cols - col_count + 1

                attrs = " ".join([f"{k.upper()}=\"{v}\"" for k, v in cell_attrs.items()])
                if not isinstance(cell, list):
                    cell = [cell]

                # multi-line cell content is joined with HTML line breaks
                cell_str = "<BR/>".join(cell)
                tags.append(f"<TD {attrs}>{cell_str}</TD>")

            tags.append("</TR>")

        tags.append("</TABLE>")

        return "<" + "".join(tags) + ">"
|
|
346
|
+
|
|
347
|
+
|
|
348
|
+
def _posix_path(path: str):
|
|
349
|
+
return path.replace(os.sep, "/")
|
|
350
|
+
|
|
351
|
+
|
|
352
|
+
def _get_node_unique_id(node: Union[Node, str], query: Query):
    """Build the globally unique graphviz id for ``node`` within ``query``."""
    # a node id string is resolved to the Node object first
    target = query.nodes[node] if isinstance(node, str) else node
    return f"{query.get_id(prefix=None)}__{target.id}"
|
|
357
|
+
|
|
358
|
+
|
|
359
|
+
def _save_param(storage, key, value):
|
|
360
|
+
if key in storage:
|
|
361
|
+
if not isinstance(storage[key], list):
|
|
362
|
+
storage[key] = [storage[key]]
|
|
363
|
+
|
|
364
|
+
storage[key].append(value)
|
|
365
|
+
else:
|
|
366
|
+
storage[key] = value
|
|
367
|
+
|
|
368
|
+
|
|
369
|
+
def _is_local_query(f_path: Optional[str]) -> bool:
|
|
370
|
+
return f_path in ["THIS", "___ME___"]
|
|
371
|
+
|
|
372
|
+
|
|
373
|
+
def _parse_function_params(func_params: str) -> Tuple[list, dict]:
    """Parse an EP parameter string into positional args and keyword args.

    Keyword args are stored as ``kwargs[name.lower()] = (name, value)`` so
    the original casing is preserved. Quoted values may contain commas,
    equal signs and whitespace; a backslash escapes the next character.

    Raises
    ------
    ValueError
        On an unclosed quote.
    RuntimeError
        If a positional argument follows a keyword argument.
    """
    def save_param(_key, _value, _args, _kwargs):
        # store as keyword arg when a name was seen, otherwise positional
        if _key:
            _kwargs[_key.lower()] = (_key, _value)
        else:
            if _kwargs:
                raise RuntimeError("Positional argument could not be after keyword argument")

            _args.append(_value)

    args: list = []
    kwargs: dict = {}
    key = ""
    str_buffer: list = []   # characters of the token being read
    in_quotes = None        # the quote char we are inside, or None
    escape_next = False     # previous char was a backslash

    if not func_params:
        return args, kwargs

    for ch in func_params:
        if escape_next:
            escape_next = False
            str_buffer.append(ch)
        elif ch == "\\":
            escape_next = True
        elif in_quotes:
            if ch == in_quotes:
                # closing quote (quote chars themselves are dropped)
                in_quotes = None
            else:
                str_buffer.append(ch)
        else:
            if ch in "\"\'":
                in_quotes = ch
            elif ch.isspace():
                # whitespace outside quotes is ignored
                continue
            elif ch == "=":
                # everything accumulated so far is the parameter name
                key = "".join(str_buffer)
                str_buffer.clear()
            elif ch == ",":
                # end of the current parameter
                save_param(key, "".join(str_buffer), args, kwargs)
                str_buffer.clear()
                key = ""
            else:
                str_buffer.append(ch)

    if in_quotes:
        raise ValueError("`func_params` unclosed quote")

    if str_buffer:
        # flush the trailing parameter (there is no trailing comma)
        save_param(key, "".join(str_buffer), args, kwargs)
        str_buffer.clear()

    return args, kwargs
|
|
427
|
+
|
|
428
|
+
|
|
429
|
+
def _parse_function(expression: str, pattern: Optional[str] = None) -> Tuple[Optional[str], list, dict]:
    """Parse ``EP_NAME(PARAM=VALUE, ...)`` into (name, args, kwargs).

    Returns ``(None, [], {})`` when ``expression`` does not match the
    call pattern.
    """
    # EP_NAME may contain letters, underscores and colons;
    # [\s\S] matches any character including newlines (unlike ".")
    default_pattern = r"^([a-zA-Z_:]*)\s*\(([\s\S]*)\)\s*$"
    match = re.search(pattern or default_pattern, expression)

    if match is None:
        return None, [], {}

    name = match.group(1)
    args, kwargs = _parse_function_params(match.group(2))

    return name, args, kwargs
|
|
445
|
+
|
|
446
|
+
|
|
447
|
+
def _get_ep_from_str(ep_string: str) -> Tuple[str, list, dict]:
    """Parse an EP string; fall back to the raw string when it isn't a call."""
    name, args, kwargs = _parse_function(ep_string)
    return (name or ep_string), args, kwargs
|
|
454
|
+
|
|
455
|
+
|
|
456
|
+
def _parse_query_path(query_path: str) -> Union[Tuple[str, Optional[str]], List[str]]:
|
|
457
|
+
query_path_splitted = query_path.rsplit("::", maxsplit=1)
|
|
458
|
+
|
|
459
|
+
if len(query_path_splitted) == 1:
|
|
460
|
+
return _posix_path(query_path_splitted[0]), None
|
|
461
|
+
else:
|
|
462
|
+
file_path, query = query_path_splitted
|
|
463
|
+
|
|
464
|
+
return _posix_path(file_path), query
|
|
465
|
+
|
|
466
|
+
|
|
467
|
+
def _parse_ep(ep_string: str, parse_eval_from_params: bool = False) -> Union[EP, NestedQuery]:
    """Parse a raw EP string into an :class:`EP` or :class:`NestedQuery`.

    ``NESTED_OTQ ...`` references and EPs listed in ``EPS_WITH_QUERIES``
    become :class:`NestedQuery`; names from ``IF_ELSE_EPS`` become
    :class:`IfElseEP`; everything else becomes a plain :class:`EP`.
    With ``parse_eval_from_params=True``, ``eval(...)`` expressions found
    at the start of parameter values are parsed into nested queries too.
    """
    if ep_string.startswith("NESTED_OTQ"):
        # format: "NESTED_OTQ <path[::query]>"
        query_path = " ".join(ep_string.split(" ")[1:])

        file_path, query = _parse_query_path(query_path)
        is_local = _is_local_query(file_path)

        return NestedQuery(
            name="NESTED_OTQ", raw_string=ep_string, query=query, file_path=None if is_local else file_path,
            is_local=is_local,
        )

    ep, args, kwargs = _get_ep_from_str(ep_string)

    if parse_eval_from_params:
        for param_name, param_value in kwargs.items():
            # look for an eval(...) call at the start of the value
            m = re.search(r"^(eval\([^)]+\)).*$", param_value[1], re.IGNORECASE)
            if not m:
                continue

            param_ep_str = m.group(1)

            try:
                # recurse without eval-parsing to avoid infinite nesting
                param_ep = _parse_ep(param_ep_str, parse_eval_from_params=False)
                if isinstance(param_ep, NestedQuery):
                    # replace the raw value with the parsed nested query
                    kwargs[param_name] = (param_value[0], param_ep)
            except Exception:
                # best effort: keep the raw value if parsing fails
                pass

    if ep.lower() in EPS_WITH_QUERIES:
        # this EP references another query: locate it in kwargs or args
        ep_description = EPS_WITH_QUERIES[ep.lower()]
        args_idx, kwargs_key = ep_description

        is_query_found = True

        if kwargs_key in kwargs:
            query_path = kwargs[kwargs_key][1]
        elif 0 <= args_idx < len(args):
            query_path = args[args_idx]
        else:
            # don't do anything, just process as EP
            is_query_found = False

        if is_query_found:
            # strip matching surrounding quotes
            if query_path[0] in ["\"", "\'"] and query_path[0] == query_path[-1]:
                query_path = query_path[1:-1]

            file_path, query = _parse_query_path(query_path)

            if file_path and query:
                is_local = _is_local_query(file_path)
                return NestedQuery(
                    name=ep, raw_string=ep_string, query=query, file_path=None if is_local else file_path,
                    args=args, kwargs=kwargs, is_local=is_local,
                )
            else:
                # no "::" in the value: treat it as an inline expression
                return NestedQuery(
                    name=ep, raw_string=ep_string, expression=file_path, args=args, kwargs=kwargs, is_local=True,
                )

    if ep in IF_ELSE_EPS:
        return IfElseEP(name=ep, raw_string=ep_string, args=args, kwargs=kwargs)

    return EP(name=ep, raw_string=ep_string, args=args, kwargs=kwargs)
|
|
531
|
+
|
|
532
|
+
|
|
533
|
+
def _parse_security(value: str) -> Tuple[Union[str, EP, NestedQuery], str, bool]:
    """Parse a SECURITY config value.

    Returns ``(security, last_token, is_active)`` where ``security`` is a
    parsed EP/NestedQuery when possible (otherwise the raw string). The
    security is considered inactive when the trailing token is not an
    integer (assumed to be the literal "No"), in which case that token is
    dropped before further parsing.

    NOTE(review): the meaning of ``last_token`` is not evident from this
    code alone — presumably a numeric id or tick type; confirm with callers.
    """
    is_security_active = True
    split_value = value.split()

    try:
        int(split_value[-1])
    except ValueError:
        # assume that third value is "No"
        is_security_active = False
        split_value.pop()

    security = " ".join(split_value[:-1])

    try:
        security_ep = _parse_ep(security)
    except ValueError:
        # unbalanced parentheses/quotes etc.: fall back to the raw string
        security_ep = None

    return (security_ep if security_ep else security), split_value[-1], is_security_active
|
|
552
|
+
|
|
553
|
+
|
|
554
|
+
def _move_between_dicts(source, output, key, func):
|
|
555
|
+
if not isinstance(source[key], list):
|
|
556
|
+
source[key] = [source[key]]
|
|
557
|
+
|
|
558
|
+
output.update([func(k.split()) for k in source[key]])
|
|
559
|
+
del source[key]
|
|
560
|
+
|
|
561
|
+
|
|
562
|
+
def _move_parameters(_from, _to):
    """Move PARAMETER and PARAMETER_MANDATORY entries from ``_from`` to ``_to``."""
    extractors = (
        # "NAME default value..." -> (NAME, "default value...")
        ("PARAMETER", lambda x: (x[0], " ".join(x[1:]))),
        # "NAME" -> (NAME, None): mandatory parameters carry no default
        ("PARAMETER_MANDATORY", lambda x: (x[0], None)),
    )

    for config_key, extractor in extractors:
        if config_key in _from:
            _move_between_dicts(_from, _to, config_key, extractor)
|
|
578
|
+
|
|
579
|
+
|
|
580
|
+
def _build_query_tree(query: Query):
    """Wire up node connections for ``query`` and detect its roots and leaves.

    Consumes SINK/SOURCE and NESTED_INPUT/NESTED_OUTPUT/*_DESCRIPTION
    entries of each node's raw config, fills ``node.sinks``,
    ``node.labels`` and the IF/ELSE branch sets of IfElseEP nodes, and
    finally sets ``query.roots`` and ``query.leaves``.

    Raises
    ------
    RuntimeError
        When a SINK/SOURCE entry references an unknown node id.
    """
    # start with every node as both root and leaf; discard as edges appear
    roots = {*query.nodes.keys()}
    leaves = {*query.nodes.keys()}

    for node_id, node in query.nodes.items():
        _move_parameters(node.config, node.params)

        # save labels
        if "NESTED_INPUT" in node.config:
            node.labels["IN"] = node.config["NESTED_INPUT"]

        if "NESTED_OUTPUT" in node.config:
            node.labels["OUT"] = node.config["NESTED_OUTPUT"]

        if "SOURCE_DESCRIPTION" in node.config:
            descriptions = node.config["SOURCE_DESCRIPTION"]
            if isinstance(descriptions, str):
                descriptions = [descriptions]
            for description in descriptions:
                # format: "<node_id>[.<...>] <in_label>.<out_label>"
                description = description.strip().split(" ")
                if len(description) > 1:
                    desc_node = description[0].split(".")[0]
                    labels = description[1].split(".")

                    if labels and desc_node in query.nodes:
                        if labels[0]:
                            query.nodes[desc_node].labels["IN"] = labels[0]

                        if labels[1]:
                            query.nodes[desc_node].labels["OUT"] = labels[1]

        if "SINK_DESCRIPTION" in node.config:
            # format: "<node_id>[.<...>].<out_label>"
            description_path = node.config["SINK_DESCRIPTION"].strip().split(".")
            if len(description_path) > 1:
                desc_node = description_path[0]
                label = description_path[-1]

                if label and desc_node in query.nodes:
                    query.nodes[desc_node].labels["OUT"] = label

        # collect (other node, is_sink) edge endpoints from SINK/SOURCE
        nodes = []

        if "SINK" in node.config:
            sink_nodes = [(sink, True) for sink in node.config["SINK"].strip().split()]
            if sink_nodes:
                # node has outgoing edges: it is not a leaf
                leaves.discard(node_id)

            nodes += sink_nodes
            del node.config["SINK"]

        if "SOURCE" in node.config:
            source_nodes = [(sink, False) for sink in node.config["SOURCE"].strip().split()]
            if source_nodes:
                # node has incoming edges: it is not a root
                roots.discard(node_id)

            nodes += source_nodes
            del node.config["SOURCE"]

        for source_node, is_sink_node in nodes:
            # just ignore other nodes in path
            source_node_path = source_node.split(".")
            source_node_id = source_node_path[0]

            if not is_sink_node and source_node_id in leaves:
                leaves.discard(source_node_id)

            if is_sink_node and source_node_id in roots:
                roots.discard(source_node_id)

            if source_node_id not in query.nodes:
                raise RuntimeError(f"Malformed otq file passed: node {source_node_id} not found in {query.name}")

            if is_sink_node:
                query.nodes[node_id].sinks.append(source_node_id)

                # remember which IF/ELSE branch this edge leaves from
                if isinstance(node.ep, IfElseEP):
                    if "IF" in source_node_path[1:]:
                        node.ep.if_nodes.add(source_node_id)

                    if "ELSE" in source_node_path[1:]:
                        node.ep.else_nodes.add(source_node_id)
            else:
                source_node = query.nodes[source_node_id]
                source_node.sinks.append(node_id)

                if isinstance(source_node.ep, IfElseEP):
                    if "IF" in source_node_path[1:]:
                        source_node.ep.if_nodes.add(node_id)

                    if "ELSE" in source_node_path[1:]:
                        source_node.ep.else_nodes.add(node_id)

    query.roots = list(roots)
    query.leaves = list(leaves)
|
|
674
|
+
|
|
675
|
+
|
|
676
|
+
def _save_dependency(obj, query: Query):
    """Record every external query referenced by ``obj`` in ``query.depends``.

    Recurses into NestedQuery values stored inside the object's kwargs.
    """
    if isinstance(obj, (EP, NestedQuery)):
        for pair in obj.kwargs.values():
            nested = pair[1]
            if isinstance(nested, NestedQuery):
                _save_dependency(nested, query)

    if isinstance(obj, NestedQuery) and not obj.expression:
        query.depends.add((obj.file_path, obj.query))
|
|
684
|
+
|
|
685
|
+
|
|
686
|
+
def _finalize_query(query: Query, graph: Graph):
    """Post-process a fully parsed query section and register it on *graph*.

    Normalizes the SECURITY config entry into parsed symbols, moves query
    parameters out of the raw config, builds the node tree, and stores the
    query under its name in ``graph.queries``.
    """
    if not query:
        return

    # the special "_meta" section only carries graph-wide configuration
    if query.name == "_meta":
        graph.config = {key.upper(): val for key, val in query.config.items()}
        return

    if "SECURITY" in query.config:
        securities = query.config["SECURITY"]
        # a single security is normalized to a one-element list
        if not isinstance(securities, list):
            securities = [securities]
            query.config["SECURITY"] = securities

        for security in securities:
            parsed = _parse_security(security)
            query.symbols.append(parsed)
            _save_dependency(parsed[0], query)

        del query.config["SECURITY"]

    _move_parameters(query.config, query.params)
    _build_query_tree(query)

    graph.queries[query.name] = query
|
711
|
+
def read_otq(path: str, parse_eval_from_params: bool = False) -> Optional[Graph]:
    """Parse a local .otq file into a :class:`Graph` of queries.

    Parameters
    ----------
    path: str
        Path to the .otq file. Remote paths (``remote://``) and non-existing
        files are skipped.
    parse_eval_from_params: bool
        Forwarded to ``_parse_ep``: enables parsing of `eval` sub-queries
        found inside EP parameters.

    Returns
    -------
    Graph or None
        Parsed graph, or ``None`` when the file cannot be read locally.
    """
    if path.startswith("remote://") or not os.path.exists(path):
        return None

    graph = Graph(path)
    current_query = None

    with open(path, "r") as input_otq:
        tmp_line = ""
        for line in input_otq:
            line = line.rstrip()

            # a trailing backslash continues the logical line on the next one
            if line.endswith("\\"):
                tmp_line += f"{line[:-1]}\n"
                continue
            else:
                tmp_line += line

            line = tmp_line.strip()
            tmp_line = ""

            if not line:
                continue

            # found new query: a "[section]" header finalizes the previous one
            m = re.search(r"^\[(.*)\]$", line)
            if m:
                # use .get(): a malformed section without a TYPE key must not
                # raise KeyError here (only GRAPH sections are finalized mid-file)
                if current_query and current_query.config.get('TYPE') == 'GRAPH':
                    _finalize_query(current_query, graph)

                query_name = m.groups()[0]
                current_query = Query(name=query_name, graph=path)
                continue

            if not current_query:
                continue

            if line.startswith("NODE"):
                prefix = r"NODE\D*?(\d+)"
            elif line.startswith("ROOT"):
                prefix = r"ROOT\D*?(\d*)"
            else:
                # other query param: "KEY = VALUE" (value may itself contain '=')
                line_expr = line.split("=")
                param, value = line_expr[0], "=".join(line_expr[1:])
                param = param.strip()
                value = value.strip()

                _save_param(current_query.config, param, value)
                continue

            # groups: (node id incl. prefix, digits, "_PARAM" tail, param name, value)
            m = re.search(rf"^({prefix})(_([a-zA-Z_]*[0-9]*))?\s*=\s*([\s\S]*)$", line)
            if m:
                node_id, _, _, node_param, value = m.groups()

                if node_id not in current_query.nodes:
                    current_query.nodes[node_id] = Node(ep=None, id=node_id, query=current_query.name)

                if not node_param:
                    # bare "NODE<N> = ..." line defines the node's EP itself
                    ep = _parse_ep(value, parse_eval_from_params=parse_eval_from_params)
                    _save_dependency(ep, current_query)
                    current_query.nodes[node_id].ep = ep
                elif node_param == "BIND_SECURITY":
                    security = _parse_security(value)
                    current_query.nodes[node_id].symbols.append(security)
                    _save_dependency(security[0], current_query)
                elif node_param == "TICK_TYPE":
                    current_query.nodes[node_id].tick_type = value
                else:
                    _save_param(current_query.nodes[node_id].config, node_param, value)

    # finalize the trailing section (no header follows it)
    if current_query:
        _finalize_query(current_query, graph)

    return graph
788
|
+
def _truncate_param_value(value, height, width):
|
|
789
|
+
lines = [
|
|
790
|
+
line if len(line) <= width or not width else line[:width] + "..."
|
|
791
|
+
for line in value.splitlines()
|
|
792
|
+
]
|
|
793
|
+
|
|
794
|
+
if height and len(lines) > height:
|
|
795
|
+
lines = lines[:height] + ["..."]
|
|
796
|
+
|
|
797
|
+
return "\n".join(lines)
|
|
798
|
+
|
|
799
|
+
|
|
800
|
+
def _split_long_value_to_lines(value, height, width, indent=0, escape=False) -> list:
|
|
801
|
+
if len(value) <= width:
|
|
802
|
+
return [value]
|
|
803
|
+
|
|
804
|
+
result = []
|
|
805
|
+
lines = value.splitlines()
|
|
806
|
+
|
|
807
|
+
# textwrap.wrap replaces newline character to whitespace and brakes multiline strings
|
|
808
|
+
# If replace_whitespace=False, it preserves newline, but not use it for result array line splitting
|
|
809
|
+
for line in lines:
|
|
810
|
+
result.extend(textwrap.wrap(line, width=width, replace_whitespace=False))
|
|
811
|
+
|
|
812
|
+
if escape:
|
|
813
|
+
result = [html.escape(s) for s in result]
|
|
814
|
+
|
|
815
|
+
if indent:
|
|
816
|
+
indent_str = " " * indent
|
|
817
|
+
for i in range(1, len(result)):
|
|
818
|
+
result[i] = indent_str + result[i]
|
|
819
|
+
|
|
820
|
+
if height and len(result) > height:
|
|
821
|
+
result = result[:height] + ['...']
|
|
822
|
+
return result
|
|
823
|
+
|
|
824
|
+
|
|
825
|
+
def transform_param_value(ep: Any, param, value, height, width):
    """Prepare an EP parameter *value* for rendering within the size limits.

    Script-like parameters (PER_TICK_SCRIPT scripts, CSV_FILE_LISTING file
    contents) are truncated line-by-line; parameters handled by a dedicated
    multiline parser are returned untouched; everything else is re-wrapped.
    """
    is_ep = isinstance(ep, EP)

    # script bodies keep their own line structure and are only clipped
    if is_ep and (
        (ep.name == "PER_TICK_SCRIPT" and param.lower() == "script")
        or (ep.name == "CSV_FILE_LISTING" and param.lower() == "file_contents")
    ):
        return _truncate_param_value(value, height, width)

    # parameters with a registered multiline parser are formatted elsewhere
    if is_ep and EP_TO_MULTILINE_ATTRS.get(ep.name, {}).get(param.lower()):
        return value

    return "\n".join(_split_long_value_to_lines(value, height, width))
|
838
|
+
def build_symbols(
    symbols, gr_nested, gr_static, graphs: GraphStorage, graph_node, config: Config, reverse=False, graph_file=None,
):
    """Render the symbol list of a query/node into the Graphviz output.

    Nested-query symbols that resolve to a known cluster are drawn as dashed
    edges between that cluster's footer and ``graph_node``'s symbols port;
    all remaining symbols are collected into a small table node attached to
    ``graph_node``.  ``reverse`` flips the edge direction of the table
    attachment; ``graph_file`` is required to resolve symbols local to the
    current .otq file.
    """
    table = GVTable()

    for symbol_data in symbols:
        # only the first element (the symbol itself) is used here
        symbol, _, _ = symbol_data

        if isinstance(symbol, NestedQuery):
            if symbol.query:
                if symbol.is_local:
                    # reversed edge directions here break the layout

                    if graph_file is None:
                        raise ValueError('`graph_file` parameter required for this case')

                    nested_cluster_id = graphs.get_query_unique_id(symbol.query, graph_file)

                    # dashed edge from the nested query's footer to our symbols port
                    gr_nested.edge(
                        f"{nested_cluster_id}__footer",
                        f"{graph_node}:symbols",
                        ltail=f"{nested_cluster_id}",
                        style="dashed", dir="both", constraint=config.constraint_edges,
                    )
                    continue

                # non-local nested query: resolve via its own file path
                nested_cluster_id = graphs.get_query_unique_id(symbol.query, symbol.file_path)

                if nested_cluster_id:
                    gr_nested.edge(
                        f"{nested_cluster_id}__footer",
                        f"{graph_node}:symbols",
                        ltail=nested_cluster_id,
                        style="dashed", dir="both", constraint=config.constraint_edges,
                    )
                    continue

            # unresolved nested query: fall through and show its text form
            query = symbol.to_string()
        elif isinstance(symbol, EP):
            query = symbol.raw_string
        else:
            # plain string symbol
            query = symbol

        table.row([query])

    # attach the collected plain symbols as a separate table node
    if len(table):
        gr_static.node(f"{graph_node}__symbols", str(table))
        gr_static.edge(
            f"{graph_node}__symbols" if not reverse else f"{graph_node}:symbols",
            f"{graph_node}:symbols" if not reverse else f"{graph_node}__symbols",
            style="dashed", constraint=config.constraint_edges,
        )
|
892
|
+
def _parse_special_attribute(param_name, param_lines, parser, height, width, cols=4):
|
|
893
|
+
"""
|
|
894
|
+
Builds better param representation for selected parameters and EPs
|
|
895
|
+
"""
|
|
896
|
+
def generate_row_string(_line: list) -> list:
|
|
897
|
+
sep = " "
|
|
898
|
+
|
|
899
|
+
# only in this case line could be longer than width
|
|
900
|
+
if len(_line) == 1 and len(_line[0]) > width:
|
|
901
|
+
_lines = _split_long_value_to_lines(_line[0], height, width, indent=4, escape=True)
|
|
902
|
+
else:
|
|
903
|
+
_lines = [sep.join(html.escape(s) for s in _line)]
|
|
904
|
+
|
|
905
|
+
return [" " * 2 + s for s in _lines]
|
|
906
|
+
|
|
907
|
+
param_value = ' '.join(param_lines)
|
|
908
|
+
params = parser(param_value)
|
|
909
|
+
|
|
910
|
+
params_table = [f"{param_name}:"]
|
|
911
|
+
current_line = []
|
|
912
|
+
current_width = 0
|
|
913
|
+
|
|
914
|
+
for param in params:
|
|
915
|
+
if width and current_line and current_width + len(param) >= width or len(current_line) == cols:
|
|
916
|
+
params_table.extend(generate_row_string(current_line))
|
|
917
|
+
current_line = []
|
|
918
|
+
current_width = 0
|
|
919
|
+
|
|
920
|
+
current_line.append(param)
|
|
921
|
+
current_width += len(param)
|
|
922
|
+
|
|
923
|
+
if current_line:
|
|
924
|
+
params_table.extend(generate_row_string(current_line))
|
|
925
|
+
|
|
926
|
+
return [(params_table, {"ALIGN": "LEFT", "BALIGN": "LEFT"})]
|
|
927
|
+
|
|
928
|
+
|
|
929
|
+
def build_node(graphs: GraphStorage, node: Node, config: Config):
    """Render a single query node as a Graphviz HTML-like table label.

    The table consists of (top to bottom): an optional IN label, the EP name,
    optional tick type / debug id / symbols marker cells, the EP parameters
    (positional first, then "special" multiline parameters, then keyword
    parameters), the node's own parameters, and either IF/ELSE output ports
    or an optional OUT label.

    Raises
    ------
    ValueError
        If ``node.ep`` is not set.
    """
    if node.ep is None:
        raise ValueError(f"EP of node {node.id} could not be None")

    table = GVTable()

    # incoming pin label row (small font), exposed as port "in"
    if "IN" in node.labels:
        table.row([
            ("<FONT POINT-SIZE=\"10\">" + html.escape(node.labels["IN"]) + "</FONT>", {"port": "in"}),
        ], attrs={
            "border": "1", "fixedsize": "TRUE", "colspan": "3",
        })

    # EP name header row, exposed as port "ep"
    table.row([(node.ep.name, {"port": "ep"})], attrs={"bgcolor": "gray95"})

    if node.tick_type:
        table.cell([node.tick_type])

    if config.render_debug_info:
        table.cell([node.id])

    # symbols marker cell provides the "symbols" port for dashed edges
    if node.symbols:
        table.cell([("[■]", {"port": "symbols"})])

    if node.ep and (node.ep.args or node.ep.kwargs):
        # positional args get key=None; kwargs keep their (key, value) pairs
        params: List[Tuple[Optional[str], Union[str, NestedQuery]]] = \
            [(None, v) for v in node.ep.args] + list(node.ep.kwargs.values())

        param_args_lines = []
        param_kwargs_lines = []
        special_params = []

        for idx, data in enumerate(params):
            k, v = data
            # port named after the key (None for positional args)
            attrs = {"port": k}
            # only the last parameter row draws the bottom border
            if idx == len(params) - 1:
                attrs["sides"] = "LRB"
            else:
                attrs["sides"] = "LR"

            if isinstance(v, NestedQuery):
                param_value = v.raw_string
            else:
                param_value = v

            is_special_attribute = k and EP_TO_MULTILINE_ATTRS.get(node.ep.name, {}).get(k.lower())

            param_value = transform_param_value(node.ep, k, param_value, config.height, config.width)

            # special attributes are escaped later by their dedicated parser
            if not is_special_attribute:
                param_value = html.escape(param_value)

            param_value = param_value.replace("\t", " " * 4)
            param_lines = param_value.splitlines()

            # additional k check required by mypy
            if is_special_attribute and k:
                special_params.extend(
                    _parse_special_attribute(
                        k, param_lines, EP_TO_MULTILINE_ATTRS[node.ep.name][k.lower()], config.height, config.width,
                    )
                )
            else:
                if k:
                    # single-line values render as "key=value", multiline as "key:" header
                    if len(param_lines) == 1:
                        param_lines[0] = f"{html.escape(k)}={param_lines[0]}"
                    else:
                        param_lines = [f"{html.escape(k)}:"] + param_lines

                if len(param_lines) > 1:
                    # Adding indents disables the default horizontal central
                    # align when an EP has a multiline parameter.
                    # The align change affects all parameters of the EP.
                    for i in range(len(param_lines)):
                        if i > 0:
                            param_lines[i] = " " * 2 + param_lines[i]

                    attrs.update({"ALIGN": "LEFT", "BALIGN": "LEFT"})

                if k:
                    param_kwargs_lines.append((param_lines, attrs))
                else:
                    param_args_lines.append((param_lines, attrs))

        # fixed ordering: positional args, special multiline params, kwargs
        for param_lines, attrs in param_args_lines + special_params + param_kwargs_lines:
            table.row([param_lines], attrs=attrs)

    # node-level (query) parameters, truncated to the configured limits
    if node.params:
        table.row([[
            f"{html.escape(k)}={html.escape(_truncate_param_value(v, config.height, config.width))}"
            for k, v in node.params.items()
        ]])

    # IF/ELSE EPs expose two output ports instead of a plain OUT label
    if isinstance(node.ep, IfElseEP):
        table.row([
            ("<FONT POINT-SIZE=\"10\">[IF]</FONT>", {"port": "if"}), ("", {"border": "0"}),
            ("<FONT POINT-SIZE=\"10\">[ELSE]</FONT>", {"port": "else"})
        ], attrs={
            "border": "1", "fixedsize": "TRUE", "colspan": "1",
        })
    elif "OUT" in node.labels:
        table.row([
            ("<FONT POINT-SIZE=\"10\">" + html.escape(node.labels["OUT"]) + "</FONT>", {"port": "out"}),
        ], attrs={
            "border": "1", "fixedsize": "TRUE", "colspan": "3",
        })

    return str(table)
|
1039
|
+
def _parse_time(time_str: str) -> str:
|
|
1040
|
+
if time_str:
|
|
1041
|
+
try:
|
|
1042
|
+
time_str = datetime.strptime(time_str, "%Y%m%d%H%M%S%f").strftime("%Y/%m/%d %H:%M:%S.%f"[:-3])
|
|
1043
|
+
except ValueError:
|
|
1044
|
+
pass
|
|
1045
|
+
else:
|
|
1046
|
+
time_str = "--"
|
|
1047
|
+
|
|
1048
|
+
return time_str
|
|
1049
|
+
|
|
1050
|
+
|
|
1051
|
+
def _build_time_expr(table: GVTable, name: str, time_expr: str):
    """Append a header row and an escaped expression row for *name* to *table*.

    Long expressions (over 60 characters after escaping) are rendered in a
    smaller font with extra cell padding to stay readable.
    """
    cell_attrs = {}
    escaped_expr = html.escape(time_expr)

    if len(escaped_expr) > 60:
        escaped_expr = f"<FONT POINT-SIZE=\"10\">{escaped_expr}</FONT>"
        cell_attrs["cellpadding"] = "4"

    table.row([name], attrs={"bgcolor": "gray95"})
    table.row([escaped_expr], attrs=cell_attrs)
|
1062
|
+
def _get_nested_query(nested_query: NestedQuery, local_graph: Graph, graphs: GraphStorage) -> Optional[Query]:
    """Resolve *nested_query* to its parsed :class:`Query`, or ``None``.

    Local references are looked up in *local_graph*; external ones via the
    global *graphs* storage by file path.
    """
    if not nested_query.query:
        return None

    if nested_query.is_local:
        # the referenced query lives in the same .otq file
        return local_graph.queries[nested_query.query]

    return graphs.get_query(nested_query.file_path, nested_query.query)
|
1072
|
+
def _render_graph(
    gr_root, gr, graphs: GraphStorage, graph_name: str, queries: set, config: Config,
):
    """Render the selected *queries* of one .otq file into Graphviz subgraphs.

    ``gr_root`` is the top-level digraph (used for cross-cluster edges),
    ``gr`` the cluster for this .otq file.  ``queries`` is the set of query
    names to draw; empty or ``{"*"}`` means all queries of the file.
    """
    graph = graphs[graph_name]

    if not queries or queries == {"*"}:
        queries = set(graph.queries.keys())

    for query_name in queries:
        # requested query may not exist in this file — skip silently
        if query_name not in graph.queries:
            continue

        query = graph.queries[query_name]
        query_id = query.get_id()

        with gr.subgraph(name=query_id, node_attr={"shape": "plaintext"}) as gr_sub:
            gr_sub.attr(label=query_name)

            # query-level settings fall back to the graph-wide (_meta) config
            start_time = _parse_time(query.config.get("START", graph.config.get("START")))
            end_time = _parse_time(query.config.get("END", graph.config.get("END")))

            start_expression = query.config.get("START_EXPRESSION", graph.config.get("START_EXPRESSION"))
            end_expression = query.config.get("END_EXPRESSION", graph.config.get("END_EXPRESSION"))

            tz_data = query.config.get("TZ", graph.config.get("TZ"))
            if not tz_data:
                tz_data = "--"

            # footer table: time range / timezone summary for the query
            table = GVTable().row([
                "START_TIME", "END_TIME", "TZ"
            ], attrs={"bgcolor": "gray95"}).row([
                start_time, end_time, tz_data,
            ])

            if start_expression:
                _build_time_expr(table, "START_EXPRESSION", start_expression)

            if end_expression:
                _build_time_expr(table, "END_EXPRESSION", end_expression)

            # ports used to attach parameter and symbol tables via dashed edges
            table.row([
                ("PARAMETERS", {"port": "params"}),
                ("SYMBOLS", {"port": "symbols"}),
            ], attrs={"bgcolor": "gray95"})

            footer_id = f"{query_id}__footer"
            gr_sub.node(footer_id, str(table), labelloc="c")

            # Put footer to the bottom and, most times, to the center
            for node_id in query.leaves:
                gr_sub.edge(_get_node_unique_id(node_id, query), footer_id, style="invis")

            if query.params:
                gr_sub.node(
                    f"{query_id}__params",
                    str(GVTable().row([[
                        f"{html.escape(k)}" + (f" = {html.escape(v)}" if v else "") for k, v in query.params.items()
                    ]]))
                )

                gr_sub.edge(
                    f"{footer_id}:params", f"{query_id}__params",
                    style="dashed", constraint=config.constraint_edges,
                )

            if query.symbols:
                build_symbols(
                    query.symbols, gr, gr_sub, graphs, f"{query_id}__footer", config,
                    reverse=True, graph_file=graph.file_path,
                )

            for node_id, node in query.nodes.items():
                node_unique_id = _get_node_unique_id(node, query)
                gr_sub.node(node_unique_id, build_node(graphs, node, config), group=query_name)

                for sink in node.sinks:
                    # choose the source port: labeled output or bare node
                    if "OUT" in node.labels:
                        output_port = ":out"
                    else:
                        output_port = ""

                    # IF/ELSE EPs route each sink through its branch port
                    if isinstance(node.ep, IfElseEP):
                        if sink in node.ep.else_nodes:
                            output_port = ":else"
                        else:
                            output_port = ":if"

                    sink_node = query.nodes[sink]
                    if "IN" in sink_node.labels:
                        sink_port = ":in"
                    else:
                        sink_port = ""

                    gr_sub.edge(
                        f"{node_unique_id}{output_port}", f"{_get_node_unique_id(sink_node, query)}{sink_port}",
                    )

                # dashed cross-cluster edges for nested queries passed as EP params
                for param_name, param_value in node.ep.kwargs.items():
                    if isinstance(param_value[1], NestedQuery):
                        nested_cluster = _get_nested_query(param_value[1], graph, graphs)
                        if not nested_cluster:
                            continue

                        gr_root.edge(
                            f"{node_unique_id}:{param_name}",
                            _get_node_unique_id(nested_cluster.roots[0], nested_cluster),
                            lhead=nested_cluster.get_id(),
                            style="dashed", dir="both", constraint=config.constraint_edges,
                        )

                if node.symbols:
                    build_symbols(node.symbols, gr, gr_sub, graphs, node_unique_id, config, graph_file=graph.file_path)

                # node whose EP *is* a nested query: link to that query's cluster
                if isinstance(node.ep, NestedQuery):
                    nested_cluster = _get_nested_query(node.ep, graph, graphs)
                    if not nested_cluster:
                        continue

                    gr_root.edge(
                        node_unique_id,
                        _get_node_unique_id(nested_cluster.roots[0], nested_cluster),
                        lhead=nested_cluster.get_id(),
                        style="dashed", dir="both", constraint=config.constraint_edges,
                    )
|
1198
|
+
def render_otq(
    path: Union[str, List[str]],
    image_path: Optional[str] = None,
    output_format: Optional[str] = None,
    load_external_otqs: bool = True,
    view: bool = False,
    line_limit: Optional[Tuple[int, int]] = (10, 60),
    parse_eval_from_params: bool = False,
    render_debug_info: bool = False,
    debug: bool = False,
    graphviz_compat_mode: bool = False,
) -> str:
    """
    Render queries from .otq files.

    Parameters
    ----------
    path: str, List[str]
        Path to .otq file or list of paths to multiple .otq files.
        A specific query to render can be specified with the format: `path_to_otq::query_name`
    image_path: str, None
        Path for generated image. If omitted, image will be saved in a temp dir
    output_format: str, None
        `Graphviz` rendering format. Default: `svg`.
        If `image_path` contains one of next extensions, `output_format` will be set automatically: `png`, `svg`, `dot`.
    load_external_otqs: bool
        If set to `True` (default) dependencies from external .otq files (not listed in ``path`` param)
        will be loaded automatically.
    view: bool
        Whether the generated image should be shown after render.
    line_limit: Tuple[int, int], None
        Limit for maximum number of lines and length of some EP parameters strings.
        First param is limit of lines, second - limit of characters in each line.
        If set to None limit disabled.
        If one of tuple values set to zero the corresponding limit disabled.
    parse_eval_from_params: bool
        Enable parsing and printing `eval` sub-queries from EP parameters.
    render_debug_info: bool
        Render additional debug information.
    debug: bool
        Allow to print stdout or stderr from `Graphviz` render.
    graphviz_compat_mode: bool
        Change internal parameters of result graph for better compatibility with old `Graphviz` versions.
        Could produce larger and less readable graphs.

    Returns
    -------
    Path to rendered image

    Examples
    --------

    Render single file:

    >>> otp.utils.render_otq("./test.otq") # doctest: +SKIP

    .. image:: ../../static/testing/images/render_otq_1.png

    Render multiple files:

    >>> otp.utils.render_otq(["./first.otq", "./second.otq"]) # doctest: +SKIP

    .. image:: ../../static/testing/images/render_otq_2.png

    Render specific queries from multiple files:

    >>> otp.utils.render_otq(["./first.otq", "./second.otq::some_query"]) # doctest: +SKIP
    """
    # (0, 0) means "no limits" for both line count and line width
    if line_limit is None:
        line_limit = (0, 0)

    height, width = line_limit
    if height < 0 or width < 0:
        raise ValueError("line_limit values should not be negative")

    config_kwargs = {}
    if graphviz_compat_mode:
        # constraint=false edges are friendlier to old Graphviz layouts
        config_kwargs["constraint_edges"] = "false"

    config = Config(height=height, width=width, render_debug_info=render_debug_info, **config_kwargs)

    if not isinstance(path, list):
        path = [path]

    path = [_posix_path(p) for p in path]

    graphs = GraphStorage()

    # per-file sets of query names to draw; {"*"} means "all queries"
    queries_to_render: Dict[str, Set[str]] = defaultdict(set)
    path_files: List[str] = []

    for otq_path in path:
        # split "file.otq::query" into file path and optional query name
        query_file, query_name = _parse_query_path(otq_path)

        path_files.append(query_file)

        if queries_to_render[query_file] == {"*"}:
            continue

        if query_name:
            queries_to_render[query_file].add(query_name)
        else:
            queries_to_render[query_file] = {"*"}

    # BFS over .otq files, following discovered dependencies
    otq_files: Deque[str] = deque(path_files)

    while otq_files:
        otq_path = otq_files.popleft()

        graph = read_otq(otq_path, parse_eval_from_params=parse_eval_from_params)

        # remote or missing files are silently skipped
        if not graph:
            continue

        graphs[otq_path] = graph

        for graph_query in graph.queries.values():
            for dep_file, dep_query in graph_query.depends:
                # dependency without a file refers to the current file
                if dep_file is None:
                    dep_file = otq_path

                if dep_file not in graphs and load_external_otqs:
                    otq_files.append(dep_file)

                if queries_to_render[dep_file] == {"*"}:
                    continue

                # external deps are rendered when loading is enabled;
                # local deps only when their parent query is rendered itself
                if (
                    load_external_otqs and dep_file != otq_path or
                    dep_file == otq_path and graph_query.name in queries_to_render[otq_path]
                ):
                    if dep_query:
                        queries_to_render[dep_file].add(dep_query)
                    else:
                        queries_to_render[dep_file] = {"*"}

    if image_path:
        extension = Path(image_path).suffix

        if extension:
            extension = extension[1:]

        # strip a recognized extension so graphviz doesn't duplicate it,
        # and derive the output format from it when not given explicitly
        if extension == output_format or output_format is None and extension in {"png", "svg", "dot"}:
            image_path = str(Path(image_path).with_suffix(""))

            if output_format is None:
                output_format = extension

    if not output_format:
        output_format = "svg"

    if not image_path:
        image_path = TmpFile().path

    gr = gv.Digraph(format=output_format, filename=image_path, engine="dot")
    # compound=true allows edges that target whole clusters (lhead/ltail)
    gr.attr("graph", compound="true")
    gr.attr("node", shape="plaintext", margin="0")

    idx = 0
    for otq_path, graph in graphs.items():
        if not queries_to_render[otq_path]:
            continue

        # one outer cluster per .otq file, labeled with its path
        with gr.subgraph(name=f"cluster__graph__{idx}", node_attr={"shape": "plaintext"}) as gr_otq:
            gr_otq.attr(label=otq_path)
            gr_otq.attr(margin="16")
            _render_graph(gr, gr_otq, graphs, otq_path, queries_to_render[otq_path], config)

        idx += 1

    try:
        return gr.render(view=view, quiet=not debug)
    except Exception as exc:
        raise RuntimeError(
            "Graphviz render failed. Try to set parameter `graphviz_compat_mode=True` "
            "for better compatibility if you use old Graphviz version"
        ) from exc