pydpm_xl 0.1.39rc32__py3-none-any.whl → 0.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- py_dpm/__init__.py +1 -1
- py_dpm/api/__init__.py +58 -189
- py_dpm/api/dpm/__init__.py +20 -0
- py_dpm/api/{data_dictionary.py → dpm/data_dictionary.py} +903 -984
- py_dpm/api/dpm/explorer.py +236 -0
- py_dpm/api/dpm/hierarchical_queries.py +142 -0
- py_dpm/api/{migration.py → dpm/migration.py} +16 -19
- py_dpm/api/{operation_scopes.py → dpm/operation_scopes.py} +319 -267
- py_dpm/api/dpm_xl/__init__.py +25 -0
- py_dpm/api/{ast_generator.py → dpm_xl/ast_generator.py} +3 -3
- py_dpm/api/{complete_ast.py → dpm_xl/complete_ast.py} +186 -284
- py_dpm/api/dpm_xl/semantic.py +358 -0
- py_dpm/api/{syntax.py → dpm_xl/syntax.py} +6 -5
- py_dpm/api/explorer.py +4 -0
- py_dpm/api/semantic.py +30 -306
- py_dpm/cli/__init__.py +9 -0
- py_dpm/{client.py → cli/main.py} +12 -10
- py_dpm/dpm/__init__.py +11 -0
- py_dpm/{models.py → dpm/models.py} +112 -88
- py_dpm/dpm/queries/base.py +100 -0
- py_dpm/dpm/queries/basic_objects.py +33 -0
- py_dpm/dpm/queries/explorer_queries.py +352 -0
- py_dpm/dpm/queries/filters.py +139 -0
- py_dpm/dpm/queries/glossary.py +45 -0
- py_dpm/dpm/queries/hierarchical_queries.py +838 -0
- py_dpm/dpm/queries/tables.py +133 -0
- py_dpm/dpm/utils.py +356 -0
- py_dpm/dpm_xl/__init__.py +8 -0
- py_dpm/dpm_xl/ast/__init__.py +14 -0
- py_dpm/{AST/ASTConstructor.py → dpm_xl/ast/constructor.py} +6 -6
- py_dpm/{AST/MLGeneration.py → dpm_xl/ast/ml_generation.py} +137 -87
- py_dpm/{AST/ModuleAnalyzer.py → dpm_xl/ast/module_analyzer.py} +7 -7
- py_dpm/{AST/ModuleDependencies.py → dpm_xl/ast/module_dependencies.py} +56 -41
- py_dpm/{AST/ASTObjects.py → dpm_xl/ast/nodes.py} +1 -1
- py_dpm/{AST/check_operands.py → dpm_xl/ast/operands.py} +16 -13
- py_dpm/{AST/ASTTemplate.py → dpm_xl/ast/template.py} +2 -2
- py_dpm/{AST/WhereClauseChecker.py → dpm_xl/ast/where_clause.py} +2 -2
- py_dpm/dpm_xl/grammar/__init__.py +18 -0
- py_dpm/dpm_xl/operators/__init__.py +19 -0
- py_dpm/{Operators/AggregateOperators.py → dpm_xl/operators/aggregate.py} +7 -7
- py_dpm/{Operators/NumericOperators.py → dpm_xl/operators/arithmetic.py} +6 -6
- py_dpm/{Operators/Operator.py → dpm_xl/operators/base.py} +5 -5
- py_dpm/{Operators/BooleanOperators.py → dpm_xl/operators/boolean.py} +5 -5
- py_dpm/{Operators/ClauseOperators.py → dpm_xl/operators/clause.py} +8 -8
- py_dpm/{Operators/ComparisonOperators.py → dpm_xl/operators/comparison.py} +5 -5
- py_dpm/{Operators/ConditionalOperators.py → dpm_xl/operators/conditional.py} +7 -7
- py_dpm/{Operators/StringOperators.py → dpm_xl/operators/string.py} +5 -5
- py_dpm/{Operators/TimeOperators.py → dpm_xl/operators/time.py} +6 -6
- py_dpm/{semantics/SemanticAnalyzer.py → dpm_xl/semantic_analyzer.py} +168 -68
- py_dpm/{semantics/Symbols.py → dpm_xl/symbols.py} +3 -3
- py_dpm/dpm_xl/types/__init__.py +13 -0
- py_dpm/{DataTypes/TypePromotion.py → dpm_xl/types/promotion.py} +2 -2
- py_dpm/{DataTypes/ScalarTypes.py → dpm_xl/types/scalar.py} +2 -2
- py_dpm/dpm_xl/utils/__init__.py +14 -0
- py_dpm/{data_handlers.py → dpm_xl/utils/data_handlers.py} +2 -2
- py_dpm/{Utils → dpm_xl/utils}/operands_mapping.py +1 -1
- py_dpm/{Utils → dpm_xl/utils}/operator_mapping.py +8 -8
- py_dpm/{OperationScopes/OperationScopeService.py → dpm_xl/utils/scopes_calculator.py} +148 -58
- py_dpm/{Utils/ast_serialization.py → dpm_xl/utils/serialization.py} +3 -4
- py_dpm/dpm_xl/validation/__init__.py +12 -0
- py_dpm/{Utils/ValidationsGenerationUtils.py → dpm_xl/validation/generation_utils.py} +2 -3
- py_dpm/{ValidationsGeneration/PropertiesConstraintsProcessor.py → dpm_xl/validation/property_constraints.py} +56 -21
- py_dpm/{ValidationsGeneration/auxiliary_functions.py → dpm_xl/validation/utils.py} +2 -2
- py_dpm/{ValidationsGeneration/VariantsProcessor.py → dpm_xl/validation/variants.py} +149 -55
- py_dpm/exceptions/__init__.py +23 -0
- py_dpm/{Exceptions → exceptions}/exceptions.py +7 -2
- pydpm_xl-0.2.1.dist-info/METADATA +278 -0
- pydpm_xl-0.2.1.dist-info/RECORD +88 -0
- pydpm_xl-0.2.1.dist-info/entry_points.txt +2 -0
- py_dpm/Exceptions/__init__.py +0 -0
- py_dpm/OperationScopes/__init__.py +0 -0
- py_dpm/Operators/__init__.py +0 -0
- py_dpm/Utils/__init__.py +0 -0
- py_dpm/Utils/utils.py +0 -2
- py_dpm/ValidationsGeneration/Utils.py +0 -364
- py_dpm/ValidationsGeneration/__init__.py +0 -0
- py_dpm/api/data_dictionary_validation.py +0 -614
- py_dpm/db_utils.py +0 -221
- py_dpm/grammar/__init__.py +0 -0
- py_dpm/grammar/dist/__init__.py +0 -0
- py_dpm/grammar/dpm_xlLexer.g4 +0 -437
- py_dpm/grammar/dpm_xlParser.g4 +0 -263
- py_dpm/semantics/DAG/DAGAnalyzer.py +0 -158
- py_dpm/semantics/DAG/__init__.py +0 -0
- py_dpm/semantics/__init__.py +0 -0
- py_dpm/views/data_types.sql +0 -12
- py_dpm/views/datapoints.sql +0 -65
- py_dpm/views/hierarchy_operand_reference.sql +0 -11
- py_dpm/views/hierarchy_preconditions.sql +0 -13
- py_dpm/views/hierarchy_variables.sql +0 -26
- py_dpm/views/hierarchy_variables_context.sql +0 -14
- py_dpm/views/key_components.sql +0 -18
- py_dpm/views/module_from_table.sql +0 -11
- py_dpm/views/open_keys.sql +0 -13
- py_dpm/views/operation_info.sql +0 -27
- py_dpm/views/operation_list.sql +0 -18
- py_dpm/views/operations_versions_from_module_version.sql +0 -30
- py_dpm/views/precondition_info.sql +0 -17
- py_dpm/views/report_type_operand_reference_info.sql +0 -18
- py_dpm/views/subcategory_info.sql +0 -17
- py_dpm/views/table_info.sql +0 -19
- pydpm_xl-0.1.39rc32.dist-info/METADATA +0 -53
- pydpm_xl-0.1.39rc32.dist-info/RECORD +0 -96
- pydpm_xl-0.1.39rc32.dist-info/entry_points.txt +0 -2
- /py_dpm/{AST → cli/commands}/__init__.py +0 -0
- /py_dpm/{migration.py → dpm/migration.py} +0 -0
- /py_dpm/{AST/ASTVisitor.py → dpm_xl/ast/visitor.py} +0 -0
- /py_dpm/{DataTypes → dpm_xl/grammar/generated}/__init__.py +0 -0
- /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/dpm_xlLexer.interp +0 -0
- /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/dpm_xlLexer.py +0 -0
- /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/dpm_xlLexer.tokens +0 -0
- /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/dpm_xlParser.interp +0 -0
- /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/dpm_xlParser.py +0 -0
- /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/dpm_xlParser.tokens +0 -0
- /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/dpm_xlParserListener.py +0 -0
- /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/dpm_xlParserVisitor.py +0 -0
- /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/listeners.py +0 -0
- /py_dpm/{DataTypes/TimeClasses.py → dpm_xl/types/time.py} +0 -0
- /py_dpm/{Utils → dpm_xl/utils}/tokens.py +0 -0
- /py_dpm/{Exceptions → exceptions}/messages.py +0 -0
- {pydpm_xl-0.1.39rc32.dist-info → pydpm_xl-0.2.1.dist-info}/WHEEL +0 -0
- {pydpm_xl-0.1.39rc32.dist-info → pydpm_xl-0.2.1.dist-info}/licenses/LICENSE +0 -0
- {pydpm_xl-0.1.39rc32.dist-info → pydpm_xl-0.2.1.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,133 @@
|
|
|
1
|
+
from typing import Optional
|
|
2
|
+
|
|
3
|
+
from sqlalchemy import distinct
|
|
4
|
+
from py_dpm.dpm.models import (
|
|
5
|
+
TableVersion,
|
|
6
|
+
ViewDatapoints,
|
|
7
|
+
ModuleVersion,
|
|
8
|
+
ModuleVersionComposition,
|
|
9
|
+
)
|
|
10
|
+
from py_dpm.dpm.queries.base import BaseQuery
|
|
11
|
+
from py_dpm.dpm.queries.filters import filter_by_release, filter_by_date
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class TableQuery:
    """
    Queries related to data structure references (Tables, Rows, Columns, Sheets).
    """

    @staticmethod
    def get_tables(
        session,
        release_id: Optional[int] = None,
        date: Optional[str] = None,
        release_code: Optional[str] = None,
    ) -> BaseQuery:
        """Get all available table codes."""
        # At most one selector (release_id / date / release_code) may be given.
        selectors = [s for s in (release_id, date, release_code) if s]
        if len(selectors) > 1:
            raise ValueError(
                "Specify a maximum of one of release_id, release_code or date."
            )

        query = session.query(TableVersion)

        if date:
            # Reference-date filtering lives on the module version, so join
            # through the module composition table first.
            query = query.join(
                ModuleVersionComposition,
                TableVersion.tablevid == ModuleVersionComposition.tablevid,
            ).join(
                ModuleVersion,
                ModuleVersionComposition.modulevid == ModuleVersion.modulevid,
            )
            query = filter_by_date(
                query,
                date,
                ModuleVersion.fromreferencedate,
                ModuleVersion.toreferencedate,
            )
        elif release_id:
            query = filter_by_release(
                query,
                release_id=release_id,
                start_col=TableVersion.startreleaseid,
                end_col=TableVersion.endreleaseid,
            )
        elif release_code:
            query = filter_by_release(
                query,
                release_code=release_code,
                start_col=TableVersion.startreleaseid,
                end_col=TableVersion.endreleaseid,
            )

        return BaseQuery(session, query.order_by(TableVersion.code))

    @staticmethod
    def _distinct_datapoint_codes(
        session,
        code_column: str,
        release_id: Optional[int],
        table_code: Optional[str] = None,
        exclude_empty: bool = False,
    ) -> BaseQuery:
        """Shared helper: distinct non-null values of *code_column* from the
        datapoints view, optionally restricted to one table and excluding
        empty strings, release-filtered and ordered by the code column."""
        subq = ViewDatapoints.create_view_query(session).subquery()
        column = getattr(subq.c, code_column)

        criteria = []
        if table_code is not None:
            criteria.append(subq.c.table_code == table_code)
        criteria.append(column.isnot(None))
        if exclude_empty:
            criteria.append(column != "")

        query = session.query(distinct(column).label(code_column)).filter(*criteria)
        query = filter_by_release(
            query, release_id, subq.c.start_release, subq.c.end_release
        )
        return BaseQuery(session, query.order_by(column))

    @staticmethod
    def get_available_tables_from_datapoints(
        session, release_id: Optional[int] = None
    ) -> BaseQuery:
        """Get available table codes from datapoints view."""
        return TableQuery._distinct_datapoint_codes(session, "table_code", release_id)

    @staticmethod
    def get_available_rows(
        session, table_code: str, release_id: Optional[int] = None
    ) -> BaseQuery:
        """Get available row codes for a table."""
        return TableQuery._distinct_datapoint_codes(
            session, "row_code", release_id, table_code=table_code
        )

    @staticmethod
    def get_available_columns(
        session, table_code: str, release_id: Optional[int] = None
    ) -> BaseQuery:
        """Get available column codes for a table."""
        return TableQuery._distinct_datapoint_codes(
            session, "column_code", release_id, table_code=table_code
        )

    @staticmethod
    def get_available_sheets(
        session, table_code: str, release_id: Optional[int] = None
    ) -> BaseQuery:
        """Get available sheet codes."""
        return TableQuery._distinct_datapoint_codes(
            session,
            "sheet_code",
            release_id,
            table_code=table_code,
            exclude_empty=True,
        )
py_dpm/dpm/utils.py
ADDED
|
@@ -0,0 +1,356 @@
|
|
|
1
|
+
import os
from urllib.parse import quote_plus

from dotenv import load_dotenv
from sqlalchemy import create_engine
from sqlalchemy.engine import URL
from sqlalchemy.orm import close_all_sessions, sessionmaker
from contextlib import contextmanager
from rich.console import Console

# Shared console used for the configuration warnings emitted below.
console = Console()

# Try to load .env from parent directories: walk upward from this file's
# directory until the filesystem root; the first .env found wins.
# NOTE(review): this runs at import time as a side effect of importing the module.
current_dir = os.path.dirname(os.path.abspath(__file__))
while current_dir != os.path.dirname(current_dir):  # Stop at root
    env_path = os.path.join(current_dir, ".env")
    if os.path.exists(env_path):
        load_dotenv(env_path)
        break
    current_dir = os.path.dirname(current_dir)

# SQLite configuration (default file name when SQLITE_DB_PATH is unset)
sqlite_db_path = os.getenv("SQLITE_DB_PATH", "database.db")

# Unified RDBMS configuration (preferred)
#
# These environment variables provide a single, backend-agnostic way to
# configure a server-based database for pyDPM. `PYDPM_RDBMS` selects the
# backend and the remaining variables provide connection details:
# - PYDPM_RDBMS: "postgres" or "sqlserver"
# - PYDPM_DB_HOST: hostname or IP
# - PYDPM_DB_PORT: port number (optional; defaults per backend)
# - PYDPM_DB_NAME: database name
# - PYDPM_DB_USER: username
# - PYDPM_DB_PASSWORD: password
pydpm_rdbms = os.getenv("PYDPM_RDBMS", "").strip().lower()
db_host = os.getenv("PYDPM_DB_HOST", None)
db_port = os.getenv("PYDPM_DB_PORT", None)
db_name = os.getenv("PYDPM_DB_NAME", None)
db_user = os.getenv("PYDPM_DB_USER", None)
db_password = os.getenv("PYDPM_DB_PASSWORD", None)

# Apply the per-backend default port when none was supplied.
if pydpm_rdbms == "postgres" and not db_port:
    db_port = "5432"
elif pydpm_rdbms == "sqlserver" and not db_port:
    db_port = "1433"


# PostgreSQL configuration (legacy USE_POSTGRES path)
postgres_host = os.getenv("POSTGRES_HOST", None)
postgres_port = os.getenv("POSTGRES_PORT", "5432")
postgres_db = os.getenv("POSTGRES_DB", None)
postgres_user = os.getenv("POSTGRES_USER", None)
postgres_pass = os.getenv("POSTGRES_PASS", None)

# Legacy SQL Server configuration (kept for backward compatibility)
server = os.getenv("DATABASE_SERVER", None)
username = os.getenv("DATABASE_USER", None)
password = os.getenv("DATABASE_PASS", None)
database_name = os.getenv("DATABASE_NAME", None)

# Determine database type. USE_POSTGRES takes precedence over USE_SQLITE
# (which defaults to true when neither variable is set).
use_postgres = os.getenv("USE_POSTGRES", "false").lower() == "true"
use_sqlite = os.getenv("USE_SQLITE", "true").lower() == "true" and not use_postgres

if use_postgres and not (
    postgres_host and postgres_user and postgres_pass and postgres_db
):
    console.print(f"Warning: PostgreSQL credentials not provided", style="bold yellow")
elif not use_sqlite and not use_postgres and not (server and username and password):
    console.print(f"Warning: Database credentials not provided", style="bold yellow")
elif not use_sqlite and not use_postgres:
    # Handling special characters in password for SQL Server: '}' is doubled
    # and the whole value is brace-wrapped when it contains an ODBC special
    # character, per ODBC connection-string quoting rules.
    password = password.replace("}", "}}")
    for x in "%&.@#/\\=;":
        if x in password:
            password = "{" + password + "}"
            break

# Module-level singletons managed by the engine/session helpers below.
engine = None
connection = None
sessionMakerObject = None
_current_engine_url = None
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
def create_engine_from_url(connection_url):
    """
    Create a SQLAlchemy engine from a connection URL with appropriate pooling parameters.

    Detects the database type from the URL scheme and applies pooling
    parameters conditionally:
    - SQLite: only pool_pre_ping=True (no connection pooling)
    - PostgreSQL/MySQL/others: full connection pooling parameters

    Also (re)initializes the global sessionMakerObject for use by get_session().

    Args:
        connection_url (str): SQLAlchemy connection URL (e.g., 'sqlite:///path.db', 'postgresql://user:pass@host/db')

    Returns:
        sqlalchemy.engine.Engine: Configured database engine

    Examples:
        >>> engine = create_engine_from_url('sqlite:///database.db')
        >>> engine = create_engine_from_url('postgresql://user:pass@localhost/mydb')
    """
    global engine, sessionMakerObject, _current_engine_url

    url_text = str(connection_url)
    sqlite_backend = connection_url.startswith("sqlite://")

    # SQLite engines are never reused: fresh engines avoid surprising
    # cross-test or cross-call state sharing, especially for in-memory
    # databases. Server-based engines are reused while the URL is unchanged.
    if not sqlite_backend and engine is not None and _current_engine_url == url_text:
        return engine

    if sqlite_backend:
        # SQLite doesn't support connection pooling.
        engine = create_engine(connection_url, pool_pre_ping=True)
    else:
        # Server-based databases (PostgreSQL, MySQL, etc.) with connection pooling.
        engine = create_engine(
            connection_url,
            pool_size=20,
            max_overflow=10,
            pool_recycle=180,
            pool_pre_ping=True,
        )

    # Rebind the global session factory to the fresh engine, closing any
    # sessions still attached to the previous one.
    if sessionMakerObject is not None:
        close_all_sessions()
    sessionMakerObject = sessionmaker(bind=engine)
    _current_engine_url = url_text

    return engine
|
|
141
|
+
|
|
142
|
+
|
|
143
|
+
def create_engine_object(url):
    """Create (or reuse) the global engine for *url* and rebind the session factory.

    Accepts either a string or a sqlalchemy URL object. Backend detection is
    based on the URL scheme, not on environment variables.
    """
    global engine, sessionMakerObject, _current_engine_url

    # Normalize to a string for scheme detection and reuse comparison.
    url_str = str(url)
    sqlite_backend = url_str.startswith("sqlite://")

    # Only reuse engines for non-SQLite URLs. SQLite (especially in-memory)
    # should create independent engines to avoid leaking state between calls.
    if not sqlite_backend and engine is not None and _current_engine_url == url_str:
        return engine

    if sqlite_backend:
        engine = create_engine(url, pool_pre_ping=True)
    else:
        # Server-based databases (PostgreSQL, MySQL, SQL Server, etc.) with
        # connection pooling.
        engine = create_engine(
            url, pool_size=20, max_overflow=10, pool_recycle=180, pool_pre_ping=True
        )

    # Close sessions bound to any previous engine before swapping the factory.
    if sessionMakerObject is not None:
        close_all_sessions()
    sessionMakerObject = sessionmaker(bind=engine)
    _current_engine_url = url_str
    return engine
|
|
171
|
+
|
|
172
|
+
|
|
173
|
+
def _escape_sqlserver_password(raw_password):
    """Escape a password for an ODBC connection string: double any '}' and
    brace-wrap the whole value when it contains an ODBC special character."""
    escaped = raw_password.replace("}", "}}") if raw_password else ""
    for special in "%&.@#/\\=;":
        if special in escaped:
            escaped = "{" + escaped + "}"
            break
    return escaped


def _sqlserver_connection_url(server_value, database, user, raw_password):
    """Build an mssql+pyodbc URL for the given server/database/credentials.

    Shared by the unified PYDPM_* and legacy SQL Server configuration paths,
    which previously duplicated driver selection, password escaping and
    connection-string assembly.
    """
    # On Windows the generic "SQL Server" driver is assumed; elsewhere the
    # ODBC driver name can be overridden via SQL_DRIVER.
    if os.name == "nt":
        driver = "{SQL Server}"
    else:
        driver = os.getenv("SQL_DRIVER", "{ODBC Driver 18 for SQL Server}")

    parts = (
        f"DRIVER={driver}",
        f"SERVER={server_value}",
        f"DATABASE={database}",
        f"UID={user}",
        f"PWD={_escape_sqlserver_password(raw_password)}",
        "TrustServerCertificate=yes",
    )
    return URL.create(
        "mssql+pyodbc", query={"odbc_connect": quote_plus(";".join(parts))}
    )


def get_engine(owner=None, database_path=None, connection_url=None):
    """
    Get database engine based on configuration or explicit parameters.

    Priority order:
    1. Explicit connection_url parameter (for PostgreSQL or other databases)
    2. Explicit database_path parameter (for SQLite)
    3. Unified PYDPM_RDBMS environment configuration (postgres/sqlserver)
    4. Legacy USE_POSTGRES environment configuration
    5. USE_SQLITE environment configuration
    6. Legacy SQL Server configuration (requires owner)

    Args:
        owner: Owner for SQL Server databases (EBA/EIOPA) - legacy support
        database_path: Explicit SQLite database path
        connection_url: Explicit SQLAlchemy connection URL (e.g., for PostgreSQL)

    Returns:
        SQLAlchemy Engine

    Raises:
        Exception: when falling through to the legacy SQL Server path without
            a valid owner (must be "EBA" or "EIOPA").
    """
    # Priority 1: If an explicit connection URL is provided, use it directly.
    if connection_url:
        return create_engine_from_url(connection_url)

    # Priority 2: If an explicit database_path is provided, use SQLite with it.
    if database_path:
        return create_engine_object(f"sqlite:///{database_path}")

    # Priority 3: Unified PYDPM_RDBMS configuration.
    if pydpm_rdbms in ("postgres", "sqlserver"):
        if not (db_host and db_name and db_user and db_password):
            # Incomplete unified config: warn and fall through to the legacy paths.
            console.print(
                "Warning: PYDPM_RDBMS is set but PYDPM_DB_* variables are incomplete; "
                "falling back to legacy configuration",
                style="bold yellow",
            )
        elif pydpm_rdbms == "postgres":
            port = db_port or "5432"
            return create_engine_object(
                f"postgresql://{db_user}:{db_password}@{db_host}:{port}/{db_name}"
            )
        else:
            # SQL Server via unified PYDPM_* configuration; "host,port" is the
            # ODBC syntax for a non-default port.
            port = db_port or "1433"
            return create_engine_object(
                _sqlserver_connection_url(
                    f"{db_host},{port}", db_name, db_user, db_password
                )
            )

    # Priority 4: Legacy PostgreSQL configuration.
    if use_postgres:
        if not (postgres_host and postgres_user and postgres_pass and postgres_db):
            console.print(
                "Warning: USE_POSTGRES is true but PostgreSQL credentials are incomplete; "
                "falling back to SQLite or SQL Server defaults",
                style="bold yellow",
            )
        else:
            return create_engine_object(
                f"postgresql://{postgres_user}:{postgres_pass}@"
                f"{postgres_host}:{postgres_port}/{postgres_db}"
            )

    # Priority 5: SQLite via USE_SQLITE.
    if use_sqlite:
        # Create the database directory if it doesn't exist yet.
        db_dir = os.path.dirname(sqlite_db_path)
        if db_dir and not os.path.exists(db_dir):
            os.makedirs(db_dir)

        # If an owner is specified, append it to the filename.
        if owner:
            base_name, extension = os.path.splitext(sqlite_db_path)
            extension = extension or ".db"
            db_path = f"{base_name}_{owner}{extension}"
        else:
            db_path = sqlite_db_path

        return create_engine_object(f"sqlite:///{db_path}")

    # Priority 6: Legacy SQL Server logic (requires a valid owner).
    if owner is None:
        raise Exception("Cannot generate engine. No owner used.")

    if owner not in ("EBA", "EIOPA"):
        raise Exception("Invalid owner, must be EBA or EIOPA")

    database = database_name if database_name is not None else "DPM_" + owner
    return create_engine_object(
        _sqlserver_connection_url(server, database, username, password)
    )
|
|
319
|
+
|
|
320
|
+
|
|
321
|
+
def get_connection(owner=None):
    """Return a new connection from the global engine, creating the engine
    first (via get_engine) if it does not exist yet."""
    global engine
    if engine is None:
        engine = get_engine(owner)
    return engine.connect()
|
|
327
|
+
|
|
328
|
+
|
|
329
|
+
def get_session():
    """Return a new session from the global session factory.

    The factory is initialized by create_engine_from_url()/create_engine_object(),
    so an engine must have been created before calling this.

    Returns:
        A new SQLAlchemy session bound to the current engine.

    Raises:
        Exception: if no session factory has been initialized yet.
    """
    # Fix: the docstring was previously placed after a `global` statement,
    # making it a no-op expression instead of the function docstring. The
    # `global` declaration was also unnecessary (the factory is only read).
    if sessionMakerObject is None:
        raise Exception("Not found Session Maker")
    return sessionMakerObject()
|
|
336
|
+
|
|
337
|
+
|
|
338
|
+
@contextmanager
def session_scope():
    """
    Provide a transactional scope around a series of operations.

    Intended for short-lived, one-off operations where explicit session
    closing is desired. No automatic commit is performed — transaction
    control stays with the caller — but the session is always closed in
    a finally block.
    """
    db_session = get_session()
    try:
        yield db_session
    finally:
        try:
            db_session.close()
        except Exception:
            # Best-effort close; suppress errors on shutdown paths.
            pass
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
"""
|
|
2
|
+
DPM-XL Abstract Syntax Tree
|
|
3
|
+
|
|
4
|
+
AST construction, manipulation, and analysis for DPM-XL expressions.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from py_dpm.dpm_xl.ast.nodes import *
|
|
8
|
+
from py_dpm.dpm_xl.ast.constructor import *
|
|
9
|
+
from py_dpm.dpm_xl.ast.visitor import *
|
|
10
|
+
from py_dpm.dpm_xl.ast.template import *
|
|
11
|
+
|
|
12
|
+
__all__ = [
|
|
13
|
+
# Re-export will be handled by import *
|
|
14
|
+
]
|
|
@@ -10,12 +10,12 @@ import re
|
|
|
10
10
|
|
|
11
11
|
from antlr4.tree.Tree import TerminalNodeImpl
|
|
12
12
|
|
|
13
|
-
from py_dpm.
|
|
14
|
-
from py_dpm.
|
|
15
|
-
from py_dpm.
|
|
16
|
-
from py_dpm.
|
|
17
|
-
from py_dpm.grammar.
|
|
18
|
-
from py_dpm.grammar.
|
|
13
|
+
from py_dpm.dpm_xl.ast.nodes import *
|
|
14
|
+
from py_dpm.exceptions import exceptions
|
|
15
|
+
from py_dpm.exceptions.exceptions import SemanticError
|
|
16
|
+
from py_dpm.dpm_xl.utils.tokens import TABLE_GROUP_PREFIX
|
|
17
|
+
from py_dpm.dpm_xl.grammar.generated.dpm_xlParser import dpm_xlParser
|
|
18
|
+
from py_dpm.dpm_xl.grammar.generated.dpm_xlParserVisitor import dpm_xlParserVisitor
|
|
19
19
|
|
|
20
20
|
|
|
21
21
|
class ASTVisitor(dpm_xlParserVisitor):
|