pydpm_xl-0.2.2-py3-none-any.whl → pydpm_xl-0.2.3-py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- py_dpm/__init__.py +1 -1
- py_dpm/api/__init__.py +23 -51
- py_dpm/api/dpm/__init__.py +2 -2
- py_dpm/api/dpm/instance.py +111 -0
- py_dpm/api/dpm_xl/__init__.py +10 -2
- py_dpm/api/dpm_xl/ast_generator.py +690 -10
- py_dpm/api/dpm_xl/complete_ast.py +54 -565
- py_dpm/api/{dpm → dpm_xl}/operation_scopes.py +2 -2
- py_dpm/cli/main.py +1 -1
- py_dpm/dpm/models.py +5 -1
- py_dpm/instance/__init__.py +0 -0
- py_dpm/instance/instance.py +265 -0
- {pydpm_xl-0.2.2.dist-info → pydpm_xl-0.2.3.dist-info}/METADATA +1 -1
- {pydpm_xl-0.2.2.dist-info → pydpm_xl-0.2.3.dist-info}/RECORD +18 -22
- py_dpm/api/explorer.py +0 -4
- py_dpm/api/semantic.py +0 -56
- py_dpm/dpm_xl/validation/__init__.py +0 -12
- py_dpm/dpm_xl/validation/generation_utils.py +0 -428
- py_dpm/dpm_xl/validation/property_constraints.py +0 -225
- py_dpm/dpm_xl/validation/utils.py +0 -98
- py_dpm/dpm_xl/validation/variants.py +0 -359
- {pydpm_xl-0.2.2.dist-info → pydpm_xl-0.2.3.dist-info}/WHEEL +0 -0
- {pydpm_xl-0.2.2.dist-info → pydpm_xl-0.2.3.dist-info}/entry_points.txt +0 -0
- {pydpm_xl-0.2.2.dist-info → pydpm_xl-0.2.3.dist-info}/licenses/LICENSE +0 -0
- {pydpm_xl-0.2.2.dist-info → pydpm_xl-0.2.3.dist-info}/top_level.txt +0 -0
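Note the rename in the file list above: operation_scopes.py moved from py_dpm/api/dpm/ to py_dpm/api/dpm_xl/, so imports of that module need updating when upgrading. A minimal sketch of the path change (only the module path comes from this diff; whatever names you import from it are unchanged):

    # pydpm_xl 0.2.2
    # from py_dpm.api.dpm import operation_scopes
    # pydpm_xl 0.2.3
    from py_dpm.api.dpm_xl import operation_scopes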
py_dpm/api/dpm_xl/complete_ast.py

@@ -2,16 +2,18 @@
 """
 Complete AST API - Generate ASTs exactly like the JSON examples
 
-This
-
+This module provides backwards-compatible standalone functions that delegate to ASTGeneratorAPI.
+All AST-related functionality is now consolidated in the ASTGeneratorAPI class.
 
-
-
+For new code, prefer using ASTGeneratorAPI directly:
+    from py_dpm.api.dpm_xl import ASTGeneratorAPI
+
+    generator = ASTGeneratorAPI(database_path="data.db")
+    result = generator.generate_complete_ast(expression)
 """
 
-from
-from
-from py_dpm.dpm_xl.utils.serialization import ASTToJSONVisitor
+from typing import Dict, Any, Optional, List
+from py_dpm.api.dpm_xl.ast_generator import ASTGeneratorAPI
 
 
 def generate_complete_ast(
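Per the new docstring, the legacy module-level function and the consolidated class are now equivalent entry points. A minimal sketch of both, assuming a local SQLite file at "data.db" and a placeholder expression (both values are illustrative, not taken from the diff):

    from py_dpm.api.dpm_xl import ASTGeneratorAPI
    from py_dpm.api.dpm_xl.complete_ast import generate_complete_ast

    expression = "..."  # placeholder: any DPM-XL expression string

    # Preferred path: build the API object once and reuse it across calls.
    generator = ASTGeneratorAPI(database_path="data.db")
    result_new = generator.generate_complete_ast(expression)

    # Legacy path: module-level function, now a thin shim over the same class.
    result_old = generate_complete_ast(expression, database_path="data.db")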
@@ -23,8 +25,7 @@ def generate_complete_ast(
     """
     Generate complete AST with all data fields, exactly like json_scripts examples.
 
-    This function
-    using the new SemanticAPI to perform full semantic validation and operand checking.
+    This function delegates to ASTGeneratorAPI for backwards compatibility.
 
     Args:
         expression: DPM-XL expression string
@@ -37,126 +38,12 @@
         dict with keys:
             success, ast, context, error, data_populated, semantic_result
     """
-
-
-
-
-
-    try:
-        if connection_url or database_path:
-            try:
-                get_engine(database_path=database_path, connection_url=connection_url)
-            except Exception as e:
-                return {
-                    "success": False,
-                    "ast": None,
-                    "context": None,
-                    "error": f"Database connection failed: {e}",
-                    "data_populated": False,
-                }
-
-        # Use the modern SemanticAPI which performs full semantic validation and operand checking
-        semantic_api = SemanticAPI(
-            database_path=database_path, connection_url=connection_url
-        )
-
-        semantic_result = semantic_api.validate_expression(
-            expression, release_id=release_id
-        )
-
-        # If semantic validation failed, return structured error
-        if not semantic_result.is_valid:
-            return {
-                "success": False,
-                "ast": None,
-                "context": None,
-                "error": semantic_result.error_message,
-                "data_populated": False,
-                "semantic_result": semantic_result,
-            }
-
-        ast_root = getattr(semantic_api, "ast", None)
-
-        if ast_root is None:
-            return {
-                "success": False,
-                "ast": None,
-                "context": None,
-                "error": "Semantic validation did not generate AST",
-                "data_populated": False,
-                "semantic_result": semantic_result,
-            }
-
-        # Extract components exactly like batch_validator does
-        def extract_components(ast_obj):
-            if hasattr(ast_obj, "children") and len(ast_obj.children) > 0:
-                child = ast_obj.children[0]
-                if hasattr(child, "expression"):
-                    return child.expression, child.partial_selection
-                else:
-                    return child, None
-            return ast_obj, None
-
-        actual_ast, context = extract_components(ast_root)
-
-        # Convert to JSON using the ASTToJSONVisitor, which uses VarID.data populated
-        # during semantic validation / operand checking.
-        visitor = ASTToJSONVisitor(context)
-        ast_dict = visitor.visit(actual_ast)
-
-        # Check if data fields were populated
-        data_populated = _check_data_fields_populated(ast_dict)
-
-        # Serialize context
-        context_dict = None
-        if context:
-            context_dict = {
-                "table": getattr(context, "table", None),
-                "rows": getattr(context, "rows", None),
-                "columns": getattr(context, "cols", None),
-                "sheets": getattr(context, "sheets", None),
-                "default": getattr(context, "default", None),
-                "interval": getattr(context, "interval", None),
-            }
-
-        return {
-            "success": True,
-            "ast": ast_dict,
-            "context": context_dict,
-            "error": None,
-            "data_populated": data_populated,
-            "semantic_result": semantic_result,
-        }
-
-    except Exception as e:
-        return {
-            "success": False,
-            "ast": None,
-            "context": None,
-            "error": f"API error: {str(e)}",
-            "data_populated": False,
-        }
-
-
-def _check_data_fields_populated(ast_dict):
-    """Check if any VarID nodes have data fields populated"""
-    if not isinstance(ast_dict, dict):
-        return False
-
-    if ast_dict.get("class_name") == "VarID" and "data" in ast_dict:
-        return True
-
-    # Recursively check nested structures
-    for value in ast_dict.values():
-        if isinstance(value, dict):
-            if _check_data_fields_populated(value):
-                return True
-        elif isinstance(value, list):
-            for item in value:
-                if isinstance(item, dict) and _check_data_fields_populated(item):
-                    return True
-
-    return False
+    generator = ASTGeneratorAPI(
+        database_path=database_path,
+        connection_url=connection_url,
+        enable_semantic_validation=True
+    )
+    return generator.generate_complete_ast(expression, release_id=release_id)
 
 
 def generate_complete_batch(
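The inline pipeline deleted above (connection check, SemanticAPI validation, component extraction, ASTToJSONVisitor serialization) now lives inside ASTGeneratorAPI, but the wrapper's result contract is unchanged. A sketch of consuming it, using only the keys named in the docstring and the removed return statements ("..." and "data.db" are placeholders):

    from py_dpm.api.dpm_xl.complete_ast import generate_complete_ast

    result = generate_complete_ast("...", database_path="data.db")
    if not result["success"]:
        raise ValueError(result["error"])   # database, parse, or semantic failure
    ast_dict = result["ast"]                # JSON-style AST with VarID data fields
    context = result["context"]             # table/rows/columns/sheets dict, or None
    details = result["semantic_result"]     # semantic validation result object
    assert isinstance(result["data_populated"], bool)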
@@ -168,6 +55,8 @@ def generate_complete_batch(
     """
     Generate complete ASTs for multiple expressions.
 
+    This function delegates to ASTGeneratorAPI for backwards compatibility.
+
     Args:
         expressions: List of DPM-XL expression strings
         database_path: Path to SQLite database file
@@ -178,14 +67,12 @@
     Returns:
         list: List of result dictionaries
     """
-
-
-
-
-
-
-        results.append(result)
-    return results
+    generator = ASTGeneratorAPI(
+        database_path=database_path,
+        connection_url=connection_url,
+        enable_semantic_validation=True
+    )
+    return generator.generate_complete_batch(expressions, release_id=release_id)
 
 
 # Convenience function with cleaner interface
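generate_complete_batch keeps its list-in, list-of-dicts-out contract. A sketch, assuming each per-item dict carries the same keys as a single generate_complete_ast result (only the return-type line is visible in this hunk, so that key layout is an assumption):

    from py_dpm.api.dpm_xl.complete_ast import generate_complete_batch

    expressions = ["...", "..."]  # placeholder DPM-XL expressions
    results = generate_complete_batch(expressions, database_path="data.db")
    # Assumed per-item keys, mirroring generate_complete_ast:
    errors = [r["error"] for r in results if not r["success"]]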
@@ -198,6 +85,8 @@ def parse_with_data_fields(
     """
     Simple function to parse expression and get AST with data fields.
 
+    This function delegates to ASTGeneratorAPI for backwards compatibility.
+
     Args:
         expression: DPM-XL expression string
         database_path: Path to SQLite database file
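parse_with_data_fields follows the same delegation pattern. A sketch based only on the Args visible in this hunk; its return shape is not shown here, so anything beyond the call itself would be an assumption:

    from py_dpm.api.dpm_xl.complete_ast import parse_with_data_fields

    # Only expression and database_path appear in the visible docstring fragment.
    parsed = parse_with_data_fields("...", database_path="data.db")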
@@ -232,8 +121,7 @@ def generate_enriched_ast(
     """
     Generate enriched, engine-ready AST from DPM-XL expression.
 
-    This
-    (operations, variables, tables, preconditions) for execution engines.
+    This function delegates to ASTGeneratorAPI for backwards compatibility.
 
     Args:
         expression: DPM-XL expression string
@@ -253,42 +141,19 @@
             'error': str  # Error message if failed
         }
     """
-
-    try:
-
-
-
-
-
-
-
-
-
-
-
-        complete_ast = complete_result["ast"]
-        context = complete_result.get("context") or table_context
-
-        # Enrich with framework structure
-        enriched_ast = enrich_ast_with_metadata(
-            ast_dict=complete_ast,
-            expression=expression,
-            context=context,
-            database_path=database_path,
-            connection_url=connection_url,
-            dpm_version=dpm_version,
-            operation_code=operation_code,
-            precondition=precondition,
-        )
-
-        return {"success": True, "enriched_ast": enriched_ast, "error": None}
-
-    except Exception as e:
-        return {
-            "success": False,
-            "enriched_ast": None,
-            "error": f"Enrichment error: {str(e)}",
-        }
+    generator = ASTGeneratorAPI(
+        database_path=database_path,
+        connection_url=connection_url,
+        enable_semantic_validation=True
+    )
+    return generator.generate_enriched_ast(
+        expression=expression,
+        dpm_version=dpm_version,
+        operation_code=operation_code,
+        table_context=table_context,
+        precondition=precondition,
+        release_id=release_id,
+    )
 
 
 def enrich_ast_with_metadata(
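For generate_enriched_ast, the keyword names below are taken from the new delegating body, while the result keys ('success', 'enriched_ast', 'error') come from the removed return statements and the docstring fragment above; paths and codes are placeholders, and the call assumes the remaining optional arguments keep their defaults:

    from py_dpm.api.dpm_xl.complete_ast import generate_enriched_ast

    outcome = generate_enriched_ast(
        expression="...",          # placeholder DPM-XL expression
        database_path="data.db",   # placeholder SQLite path
        operation_code="op_0001",  # placeholder operation code
    )
    if outcome["success"]:
        engine_ready = outcome["enriched_ast"]  # framework-structured, engine-ready AST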
@@ -304,7 +169,7 @@ def enrich_ast_with_metadata(
     """
     Add framework structure (operations, variables, tables, preconditions) to complete AST.
 
-    This
+    This function delegates to ASTGeneratorAPI for backwards compatibility.
 
     Args:
         ast_dict: Complete AST dictionary (from generate_complete_ast)
@@ -319,392 +184,16 @@
     Returns:
         dict: Engine-ready AST with framework structure
     """
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    # Query database for release information using SQLAlchemy
-    release_info = _get_release_info(dpm_version, engine)
-
-    # Build module info
-    module_info = {
-        "module_code": "default",
-        "module_version": "1.0.0",
-        "framework_code": "default",
-        "dpm_release": {
-            "release": release_info["release"],
-            "publication_date": release_info["publication_date"],
-        },
-        "dates": {"from": "2001-01-01", "to": None},
-    }
-
-    # Add coordinates to AST data entries
-    ast_with_coords = _add_coordinates_to_ast(ast_dict, context)
-
-    # Build operations section
-    operations = {
-        operation_code: {
-            "version_id": hash(expression) % 10000,
-            "code": operation_code,
-            "expression": expression,
-            "root_operator_id": 24,  # Default for now
-            "ast": ast_with_coords,
-            "from_submission_date": current_date,
-            "severity": "Error",
-        }
-    }
-
-    # Build variables section by extracting from the complete AST
-    all_variables, variables_by_table = _extract_variables_from_ast(ast_with_coords)
-
-    variables = all_variables
-    tables = {}
-
-    # Build tables with their specific variables
-    for table_code, table_variables in variables_by_table.items():
-        tables[table_code] = {"variables": table_variables, "open_keys": {}}
-
-    # Build preconditions
-    preconditions = {}
-    precondition_variables = {}
-
-    if precondition or (context and "table" in context):
-        preconditions, precondition_variables = _build_preconditions(
-            precondition=precondition,
-            context=context,
-            operation_code=operation_code,
-            engine=engine,
-        )
-
-    # Build dependency information
-    dependency_info = {
-        "intra_instance_validations": [operation_code],
-        "cross_instance_dependencies": [],
-    }
-
-    # Build dependency modules
-    dependency_modules = {}
-
-    # Build complete structure
-    namespace = "default_module"
-
-    return {
-        namespace: {
-            **module_info,
-            "operations": operations,
-            "variables": variables,
-            "tables": tables,
-            "preconditions": preconditions,
-            "precondition_variables": precondition_variables,
-            "dependency_information": dependency_info,
-            "dependency_modules": dependency_modules,
-        }
-    }
-
-
-def _get_release_info(dpm_version: Optional[str], engine) -> Dict[str, Any]:
-    """
-    Get release information from database using SQLAlchemy.
-
-    Args:
-        dpm_version: DPM version code (e.g., "4.0", "4.1", "4.2")
-        engine: SQLAlchemy engine
-
-    Returns:
-        dict: {'release': str, 'publication_date': str}
-    """
-    from py_dpm.dpm.models import Release
-    from sqlalchemy.orm import sessionmaker
-
-    Session = sessionmaker(bind=engine)
-    session = Session()
-
-    try:
-        if dpm_version:
-            # Query for specific version
-            version_float = float(dpm_version)
-            release = (
-                session.query(Release)
-                .filter(Release.code == str(version_float))
-                .first()
-            )
-
-            if release:
-                return {
-                    "release": str(release.code) if release.code else dpm_version,
-                    "publication_date": (
-                        release.date.strftime("%Y-%m-%d")
-                        if release.date
-                        else "2001-01-01"
-                    ),
-                }
-
-        # Fallback: get latest released version
-        release = (
-            session.query(Release)
-            .filter(Release.status == "released")
-            .order_by(Release.code.desc())
-            .first()
-        )
-
-        if release:
-            return {
-                "release": str(release.code) if release.code else "4.1",
-                "publication_date": (
-                    release.date.strftime("%Y-%m-%d") if release.date else "2001-01-01"
-                ),
-            }
-
-        # Final fallback
-        return {"release": "4.1", "publication_date": "2001-01-01"}
-
-    except Exception:
-        # Fallback on any error
-        return {"release": "4.1", "publication_date": "2001-01-01"}
-    finally:
-        session.close()
-
-
-def _get_table_info(table_code: str, engine) -> Optional[Dict[str, Any]]:
-    """
-    Get table information from database using SQLAlchemy.
-
-    Args:
-        table_code: Table code like 'F_25_01' or 'F_25.01'
-        engine: SQLAlchemy engine
-
-    Returns:
-        dict: {'table_vid': int, 'code': str} or None if not found
-    """
-    from py_dpm.dpm.models import TableVersion
-    from sqlalchemy.orm import sessionmaker
-    import re
-
-    Session = sessionmaker(bind=engine)
-    session = Session()
-
-    try:
-        # Try exact match first
-        table = (
-            session.query(TableVersion).filter(TableVersion.code == table_code).first()
-        )
-
-        if table:
-            return {"table_vid": table.tablevid, "code": table.code}
-
-        # Handle precondition parser format: F_25_01 -> F_25.01
-        if re.match(r"^[A-Z]_\d+_\d+", table_code):
-            parts = table_code.split("_", 2)
-            if len(parts) >= 3:
-                table_code_with_dot = f"{parts[0]}_{parts[1]}.{parts[2]}"
-                table = (
-                    session.query(TableVersion)
-                    .filter(TableVersion.code == table_code_with_dot)
-                    .first()
-                )
-
-                if table:
-                    return {"table_vid": table.tablevid, "code": table.code}
-
-        # Try LIKE pattern as last resort (handles sub-tables like F_25.01.a)
-        table = (
-            session.query(TableVersion)
-            .filter(TableVersion.code.like(f"{table_code}%"))
-            .order_by(TableVersion.code)
-            .first()
-        )
-
-        if table:
-            return {"table_vid": table.tablevid, "code": table.code}
-
-        return None
-
-    except Exception:
-        return None
-    finally:
-        session.close()
-
-
-def _build_preconditions(
-    precondition: Optional[str],
-    context: Optional[Dict[str, Any]],
-    operation_code: str,
-    engine,
-) -> tuple:
-    """
-    Build preconditions and precondition_variables sections.
-
-    Args:
-        precondition: Precondition variable reference (e.g., {v_F_44_04})
-        context: Context dict with 'table' key
-        operation_code: Operation code
-        engine: SQLAlchemy engine
-
-    Returns:
-        tuple: (preconditions_dict, precondition_variables_dict)
-    """
-    import re
-
-    preconditions = {}
-    precondition_variables = {}
-
-    # Extract table code from precondition or context
-    table_code = None
-
-    if precondition:
-        # Extract variable code from precondition reference like {v_F_44_04}
-        match = re.match(r"\{v_([^}]+)\}", precondition)
-        if match:
-            table_code = match.group(1)
-    elif context and "table" in context:
-        table_code = context["table"]
-
-    if table_code:
-        # Query database for actual variable ID and version
-        table_info = _get_table_info(table_code, engine)
-
-        if table_info:
-            precondition_var_id = table_info["table_vid"]
-            version_id = table_info["table_vid"]
-            precondition_code = f"p_{precondition_var_id}"
-
-            preconditions[precondition_code] = {
-                "ast": {
-                    "class_name": "PreconditionItem",
-                    "variable_id": precondition_var_id,
-                    "variable_code": table_code,
-                },
-                "affected_operations": [operation_code],
-                "version_id": version_id,
-                "code": precondition_code,
-            }
-
-            precondition_variables[str(precondition_var_id)] = "b"
-
-    return preconditions, precondition_variables
-
-
-def _extract_variables_from_ast(ast_dict: Dict[str, Any]) -> tuple:
-    """
-    Extract variables from complete AST by table.
-
-    Args:
-        ast_dict: Complete AST dictionary
-
-    Returns:
-        tuple: (all_variables_dict, variables_by_table_dict)
-    """
-    variables_by_table = {}
-    all_variables = {}
-
-    def extract_from_node(node):
-        if isinstance(node, dict):
-            # Check if this is a VarID node with data
-            if node.get("class_name") == "VarID" and "data" in node:
-                table = node.get("table")
-                if table:
-                    if table not in variables_by_table:
-                        variables_by_table[table] = {}
-
-                    # Extract variable IDs and data types from AST data array
-                    for data_item in node["data"]:
-                        if "datapoint" in data_item:
-                            var_id = str(int(data_item["datapoint"]))
-                            data_type = data_item.get("data_type", "e")
-                            variables_by_table[table][var_id] = data_type
-                            all_variables[var_id] = data_type
-
-            # Recursively process nested nodes
-            for value in node.values():
-                if isinstance(value, (dict, list)):
-                    extract_from_node(value)
-        elif isinstance(node, list):
-            for item in node:
-                extract_from_node(item)
-
-    extract_from_node(ast_dict)
-    return all_variables, variables_by_table
-
-
-def _add_coordinates_to_ast(
-    ast_dict: Dict[str, Any], context: Optional[Dict[str, Any]]
-) -> Dict[str, Any]:
-    """
-    Add x/y/z coordinates to data entries in AST.
-
-    Args:
-        ast_dict: Complete AST dictionary
-        context: Context dict with 'columns' key
-
-    Returns:
-        dict: AST with coordinates added to data entries
-    """
-    import copy
-
-    def add_coords_to_node(node):
-        if isinstance(node, dict):
-            # Handle VarID nodes with data arrays
-            if node.get("class_name") == "VarID" and "data" in node:
-                # Get column information from context
-                cols = []
-                if context and "columns" in context and context["columns"]:
-                    cols = context["columns"]
-
-                # Group data entries by row to assign coordinates correctly
-                entries_by_row = {}
-                for data_entry in node["data"]:
-                    row_code = data_entry.get("row", "")
-                    if row_code not in entries_by_row:
-                        entries_by_row[row_code] = []
-                    entries_by_row[row_code].append(data_entry)
-
-                # Assign coordinates based on column order and row grouping
-                rows = list(entries_by_row.keys())
-                for x_index, row_code in enumerate(rows, 1):
-                    for data_entry in entries_by_row[row_code]:
-                        column_code = data_entry.get("column", "")
-
-                        # Find y coordinate based on column position in context
-                        y_index = 1  # default
-                        if cols and column_code in cols:
-                            y_index = cols.index(column_code) + 1
-                        elif cols:
-                            # Fallback to order in data
-                            row_columns = [
-                                entry.get("column", "")
-                                for entry in entries_by_row[row_code]
-                            ]
-                            if column_code in row_columns:
-                                y_index = row_columns.index(column_code) + 1
-
-                        # Always add y coordinate
-                        data_entry["y"] = y_index
-
-                        # Add x coordinate only if there are multiple rows
-                        if len(rows) > 1:
-                            data_entry["x"] = x_index
-
-                        # TODO: Add z coordinate for sheets when needed
-
-            # Recursively process child nodes
-            for key, value in node.items():
-                if isinstance(value, (dict, list)):
-                    add_coords_to_node(value)
-        elif isinstance(node, list):
-            for item in node:
-                add_coords_to_node(item)
-
-    # Create a deep copy to avoid modifying the original
-    result = copy.deepcopy(ast_dict)
-    add_coords_to_node(result)
-    return result
+    generator = ASTGeneratorAPI(
+        database_path=database_path,
+        connection_url=connection_url,
+        enable_semantic_validation=True
+    )
+    return generator._enrich_ast_with_metadata(
+        ast_dict=ast_dict,
+        expression=expression,
+        context=context,
+        dpm_version=dpm_version,
+        operation_code=operation_code,
+        precondition=precondition,
+    )
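For reference, the x/y rule applied by the removed _add_coordinates_to_ast (which presumably now lives behind ASTGeneratorAPI._enrich_ast_with_metadata) is easy to see in isolation. A self-contained sketch on made-up sample entries, with the removed column-order fallback branch omitted for brevity:

    # Re-implementation of the removed coordinate rule, for illustration only.
    data = [
        {"row": "0010", "column": "0010"},
        {"row": "0010", "column": "0020"},
        {"row": "0020", "column": "0010"},
    ]
    cols = ["0010", "0020"]  # column order taken from the table context

    entries_by_row = {}
    for entry in data:  # group data entries by row, preserving encounter order
        entries_by_row.setdefault(entry.get("row", ""), []).append(entry)

    rows = list(entries_by_row)
    for x_index, row_code in enumerate(rows, 1):
        for entry in entries_by_row[row_code]:
            column_code = entry.get("column", "")
            # y: 1-based position of the column in the context's column list
            entry["y"] = cols.index(column_code) + 1 if column_code in cols else 1
            # x: only assigned when the selection spans more than one row
            if len(rows) > 1:
                entry["x"] = x_index

    assert data[0] == {"row": "0010", "column": "0010", "y": 1, "x": 1}
    assert data[2] == {"row": "0020", "column": "0010", "y": 1, "x": 2}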