pydpm_xl 0.1.39rc32__py3-none-any.whl → 0.2.0__py3-none-any.whl

This diff shows the changes between publicly available package versions as they were released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
Files changed (123)
  1. py_dpm/__init__.py +1 -1
  2. py_dpm/api/__init__.py +58 -189
  3. py_dpm/api/dpm/__init__.py +20 -0
  4. py_dpm/api/{data_dictionary.py → dpm/data_dictionary.py} +903 -984
  5. py_dpm/api/dpm/explorer.py +236 -0
  6. py_dpm/api/dpm/hierarchical_queries.py +142 -0
  7. py_dpm/api/{migration.py → dpm/migration.py} +16 -19
  8. py_dpm/api/{operation_scopes.py → dpm/operation_scopes.py} +319 -267
  9. py_dpm/api/dpm_xl/__init__.py +25 -0
  10. py_dpm/api/{ast_generator.py → dpm_xl/ast_generator.py} +3 -3
  11. py_dpm/api/{complete_ast.py → dpm_xl/complete_ast.py} +191 -167
  12. py_dpm/api/dpm_xl/semantic.py +354 -0
  13. py_dpm/api/{syntax.py → dpm_xl/syntax.py} +6 -5
  14. py_dpm/api/explorer.py +4 -0
  15. py_dpm/api/semantic.py +30 -306
  16. py_dpm/cli/__init__.py +9 -0
  17. py_dpm/{client.py → cli/main.py} +8 -8
  18. py_dpm/dpm/__init__.py +11 -0
  19. py_dpm/{models.py → dpm/models.py} +112 -88
  20. py_dpm/dpm/queries/base.py +100 -0
  21. py_dpm/dpm/queries/basic_objects.py +33 -0
  22. py_dpm/dpm/queries/explorer_queries.py +352 -0
  23. py_dpm/dpm/queries/filters.py +139 -0
  24. py_dpm/dpm/queries/glossary.py +45 -0
  25. py_dpm/dpm/queries/hierarchical_queries.py +838 -0
  26. py_dpm/dpm/queries/tables.py +133 -0
  27. py_dpm/dpm/utils.py +356 -0
  28. py_dpm/dpm_xl/__init__.py +8 -0
  29. py_dpm/dpm_xl/ast/__init__.py +14 -0
  30. py_dpm/{AST/ASTConstructor.py → dpm_xl/ast/constructor.py} +6 -6
  31. py_dpm/{AST/MLGeneration.py → dpm_xl/ast/ml_generation.py} +137 -87
  32. py_dpm/{AST/ModuleAnalyzer.py → dpm_xl/ast/module_analyzer.py} +7 -7
  33. py_dpm/{AST/ModuleDependencies.py → dpm_xl/ast/module_dependencies.py} +56 -41
  34. py_dpm/{AST/ASTObjects.py → dpm_xl/ast/nodes.py} +1 -1
  35. py_dpm/{AST/check_operands.py → dpm_xl/ast/operands.py} +16 -13
  36. py_dpm/{AST/ASTTemplate.py → dpm_xl/ast/template.py} +2 -2
  37. py_dpm/{AST/WhereClauseChecker.py → dpm_xl/ast/where_clause.py} +2 -2
  38. py_dpm/dpm_xl/grammar/__init__.py +18 -0
  39. py_dpm/dpm_xl/operators/__init__.py +19 -0
  40. py_dpm/{Operators/AggregateOperators.py → dpm_xl/operators/aggregate.py} +7 -7
  41. py_dpm/{Operators/NumericOperators.py → dpm_xl/operators/arithmetic.py} +6 -6
  42. py_dpm/{Operators/Operator.py → dpm_xl/operators/base.py} +5 -5
  43. py_dpm/{Operators/BooleanOperators.py → dpm_xl/operators/boolean.py} +5 -5
  44. py_dpm/{Operators/ClauseOperators.py → dpm_xl/operators/clause.py} +8 -8
  45. py_dpm/{Operators/ComparisonOperators.py → dpm_xl/operators/comparison.py} +5 -5
  46. py_dpm/{Operators/ConditionalOperators.py → dpm_xl/operators/conditional.py} +7 -7
  47. py_dpm/{Operators/StringOperators.py → dpm_xl/operators/string.py} +5 -5
  48. py_dpm/{Operators/TimeOperators.py → dpm_xl/operators/time.py} +6 -6
  49. py_dpm/{semantics/SemanticAnalyzer.py → dpm_xl/semantic_analyzer.py} +168 -68
  50. py_dpm/{semantics/Symbols.py → dpm_xl/symbols.py} +3 -3
  51. py_dpm/dpm_xl/types/__init__.py +13 -0
  52. py_dpm/{DataTypes/TypePromotion.py → dpm_xl/types/promotion.py} +2 -2
  53. py_dpm/{DataTypes/ScalarTypes.py → dpm_xl/types/scalar.py} +2 -2
  54. py_dpm/dpm_xl/utils/__init__.py +14 -0
  55. py_dpm/{data_handlers.py → dpm_xl/utils/data_handlers.py} +2 -2
  56. py_dpm/{Utils → dpm_xl/utils}/operands_mapping.py +1 -1
  57. py_dpm/{Utils → dpm_xl/utils}/operator_mapping.py +8 -8
  58. py_dpm/{OperationScopes/OperationScopeService.py → dpm_xl/utils/scopes_calculator.py} +148 -58
  59. py_dpm/{Utils/ast_serialization.py → dpm_xl/utils/serialization.py} +2 -2
  60. py_dpm/dpm_xl/validation/__init__.py +12 -0
  61. py_dpm/{Utils/ValidationsGenerationUtils.py → dpm_xl/validation/generation_utils.py} +2 -3
  62. py_dpm/{ValidationsGeneration/PropertiesConstraintsProcessor.py → dpm_xl/validation/property_constraints.py} +56 -21
  63. py_dpm/{ValidationsGeneration/auxiliary_functions.py → dpm_xl/validation/utils.py} +2 -2
  64. py_dpm/{ValidationsGeneration/VariantsProcessor.py → dpm_xl/validation/variants.py} +149 -55
  65. py_dpm/exceptions/__init__.py +23 -0
  66. py_dpm/{Exceptions → exceptions}/exceptions.py +7 -2
  67. pydpm_xl-0.2.0.dist-info/METADATA +278 -0
  68. pydpm_xl-0.2.0.dist-info/RECORD +88 -0
  69. pydpm_xl-0.2.0.dist-info/entry_points.txt +2 -0
  70. py_dpm/Exceptions/__init__.py +0 -0
  71. py_dpm/OperationScopes/__init__.py +0 -0
  72. py_dpm/Operators/__init__.py +0 -0
  73. py_dpm/Utils/__init__.py +0 -0
  74. py_dpm/Utils/utils.py +0 -2
  75. py_dpm/ValidationsGeneration/Utils.py +0 -364
  76. py_dpm/ValidationsGeneration/__init__.py +0 -0
  77. py_dpm/api/data_dictionary_validation.py +0 -614
  78. py_dpm/db_utils.py +0 -221
  79. py_dpm/grammar/__init__.py +0 -0
  80. py_dpm/grammar/dist/__init__.py +0 -0
  81. py_dpm/grammar/dpm_xlLexer.g4 +0 -437
  82. py_dpm/grammar/dpm_xlParser.g4 +0 -263
  83. py_dpm/semantics/DAG/DAGAnalyzer.py +0 -158
  84. py_dpm/semantics/DAG/__init__.py +0 -0
  85. py_dpm/semantics/__init__.py +0 -0
  86. py_dpm/views/data_types.sql +0 -12
  87. py_dpm/views/datapoints.sql +0 -65
  88. py_dpm/views/hierarchy_operand_reference.sql +0 -11
  89. py_dpm/views/hierarchy_preconditions.sql +0 -13
  90. py_dpm/views/hierarchy_variables.sql +0 -26
  91. py_dpm/views/hierarchy_variables_context.sql +0 -14
  92. py_dpm/views/key_components.sql +0 -18
  93. py_dpm/views/module_from_table.sql +0 -11
  94. py_dpm/views/open_keys.sql +0 -13
  95. py_dpm/views/operation_info.sql +0 -27
  96. py_dpm/views/operation_list.sql +0 -18
  97. py_dpm/views/operations_versions_from_module_version.sql +0 -30
  98. py_dpm/views/precondition_info.sql +0 -17
  99. py_dpm/views/report_type_operand_reference_info.sql +0 -18
  100. py_dpm/views/subcategory_info.sql +0 -17
  101. py_dpm/views/table_info.sql +0 -19
  102. pydpm_xl-0.1.39rc32.dist-info/METADATA +0 -53
  103. pydpm_xl-0.1.39rc32.dist-info/RECORD +0 -96
  104. pydpm_xl-0.1.39rc32.dist-info/entry_points.txt +0 -2
  105. /py_dpm/{AST → cli/commands}/__init__.py +0 -0
  106. /py_dpm/{migration.py → dpm/migration.py} +0 -0
  107. /py_dpm/{AST/ASTVisitor.py → dpm_xl/ast/visitor.py} +0 -0
  108. /py_dpm/{DataTypes → dpm_xl/grammar/generated}/__init__.py +0 -0
  109. /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/dpm_xlLexer.interp +0 -0
  110. /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/dpm_xlLexer.py +0 -0
  111. /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/dpm_xlLexer.tokens +0 -0
  112. /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/dpm_xlParser.interp +0 -0
  113. /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/dpm_xlParser.py +0 -0
  114. /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/dpm_xlParser.tokens +0 -0
  115. /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/dpm_xlParserListener.py +0 -0
  116. /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/dpm_xlParserVisitor.py +0 -0
  117. /py_dpm/{grammar/dist → dpm_xl/grammar/generated}/listeners.py +0 -0
  118. /py_dpm/{DataTypes/TimeClasses.py → dpm_xl/types/time.py} +0 -0
  119. /py_dpm/{Utils → dpm_xl/utils}/tokens.py +0 -0
  120. /py_dpm/{Exceptions → exceptions}/messages.py +0 -0
  121. {pydpm_xl-0.1.39rc32.dist-info → pydpm_xl-0.2.0.dist-info}/WHEEL +0 -0
  122. {pydpm_xl-0.1.39rc32.dist-info → pydpm_xl-0.2.0.dist-info}/licenses/LICENSE +0 -0
  123. {pydpm_xl-0.1.39rc32.dist-info → pydpm_xl-0.2.0.dist-info}/top_level.txt +0 -0
--- a/py_dpm/models.py
+++ b/py_dpm/dpm/models.py
@@ -22,10 +22,34 @@ from sqlalchemy import func
 import pandas as pd
 import warnings
 
-# Suppress pandas UserWarning about SQLAlchemy connection types
-warnings.filterwarnings("ignore", message=".*pandas only supports SQLAlchemy.*")
+from sqlalchemy.inspection import inspect
 
-Base = declarative_base()
+
+class SerializationMixin:
+    """Mixin to add serialization capabilities to SQLAlchemy models."""
+
+    def to_dict(self):
+        """Convert the model instance to a dictionary."""
+        return {c.key: getattr(self, c.key) for c in inspect(self).mapper.column_attrs}
+
+
+Base = declarative_base(cls=SerializationMixin)
+
+
+def _read_sql_with_connection(sql, session):
+    """
+    Execute pd.read_sql with proper connection handling to avoid pandas warnings.
+
+    Uses the raw DBAPI connection which works reliably with compiled SQL strings,
+    while suppressing the pandas warning about DBAPI2 connections.
+    """
+    with warnings.catch_warnings():
+        warnings.filterwarnings(
+            "ignore",
+            message=".*pandas only supports SQLAlchemy.*",
+            category=UserWarning,
+        )
+        return pd.read_sql(sql, session.connection().connection)
 
 
 def _compile_query_for_pandas(query_statement, session):
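A minimal usage sketch of the two helpers introduced above (not part of the published diff; the engine URL is illustrative, and the private helper is imported here only for illustration):

    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker

    from py_dpm.dpm.models import Release, _read_sql_with_connection

    engine = create_engine("sqlite:///dpm.db")  # illustrative connection string
    session = sessionmaker(bind=engine)()

    # Base now carries SerializationMixin, so any mapped instance can be dumped to a dict
    release = session.query(Release).first()
    print(release.to_dict())

    # The helper scopes the pandas warning filter to a single call instead of
    # muting it globally at import time, as the old module-level filter did
    df = _read_sql_with_connection("SELECT 1", session)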
@@ -670,6 +694,12 @@ class ModuleVersion(Base):
     module_version_compositions = relationship(
         "ModuleVersionComposition", back_populates="module_version"
     )
+
+    table_versions = relationship(
+        "TableVersion",
+        secondary="ModuleVersionComposition",
+        viewonly=True,
+    )
     operation_scope_compositions = relationship(
         "OperationScopeComposition", back_populates="module_version"
     )
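A short sketch of what the new viewonly relationship gives callers (not part of the diff; assumes an open session):

    mv = session.query(ModuleVersion).first()
    # TableVersion rows linked through ModuleVersionComposition are now reachable
    # directly instead of being collected by hand from module_version_compositions
    for tv in mv.table_versions:
        print(tv.tablevid)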
@@ -768,7 +798,7 @@ class ModuleVersion(Base):
             ]
         )
 
-        from py_dpm.models import TableVersion
+        from py_dpm.dpm.models import TableVersion
 
         query = (
             session.query(
@@ -1371,8 +1401,6 @@ class Release(Base):
     iscurrent = Column("IsCurrent", Boolean)
     rowguid = Column("RowGUID", String(36), ForeignKey("Concept.ConceptGUID"))
     latestvariablegentime = Column("LatestVariableGenTime", DateTime)
-    name = Column("Name", String(50))
-
     # Relationships
     concept = relationship("Concept", foreign_keys=[rowguid])
     changelogs = relationship("Changelog", back_populates="release")
@@ -2224,9 +2252,9 @@ class ViewDatapoints(Base):
             release_id,
         )
 
-        return pd.read_sql(
+        return _read_sql_with_connection(
             _compile_query_for_pandas(query.statement, session),
-            session.connection().connection,
+            session,
         )
 
     @classmethod
@@ -2239,9 +2267,7 @@ class ViewDatapoints(Base):
     def get_datapoints_sample(cls, session, limit=1000):
         """Get a sample of datapoints"""
         query = cls.create_view_query(session)
-        return pd.read_sql_query(
-            query.limit(limit).statement, session.connection().connection
-        )
+        return pd.read_sql_query(query.limit(limit).statement, session.get_bind())
 
     @classmethod
     def export_datapoints_query(cls, session):
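For context on the change above (not part of the diff): pandas accepts a SQLAlchemy Engine or Connection as the con argument, so handing it session.get_bind() avoids the DBAPI2 warning for selectable statements without first compiling them to a literal SQL string. A sketch, assuming an open session:

    engine = session.get_bind()  # the Engine this session is bound to
    sample = pd.read_sql_query(session.query(Release).limit(10).statement, engine)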
@@ -2249,7 +2275,7 @@ class ViewDatapoints(Base):
         query = cls.create_view_query(session)
         return str(
             query.statement.compile(
-                dialect=session.connection().connection.dialect,
+                dialect=session.get_bind().dialect,
                 compile_kwargs={"literal_binds": True},
             )
         )
@@ -2257,9 +2283,9 @@ class ViewDatapoints(Base):
     @classmethod
     def get_all_datapoints(cls, session):
         query = cls.create_view_query(session)
-        return pd.read_sql(
+        return _read_sql_with_connection(
             _compile_query_for_pandas(query.statement, session),
-            session.connection().connection,
+            session,
         )
 
     @classmethod
@@ -2364,9 +2390,9 @@ class ViewDatapoints(Base):
             release_id,
         )
 
-        data = pd.read_sql(
+        data = _read_sql_with_connection(
             _compile_query_for_pandas(query.statement, session),
-            session.connection().connection,
+            session,
         )
 
         # BUGFIX: Remove duplicates based on cell_code
@@ -2405,9 +2431,9 @@ class ViewDatapoints(Base):
             query, ModuleVersion.startreleaseid, ModuleVersion.endreleaseid, release_id
         )
 
-        return pd.read_sql(
+        return _read_sql_with_connection(
             _compile_query_for_pandas(query.statement, session),
-            session.connection().connection,
+            session,
         )
 
     @classmethod
@@ -2420,9 +2446,9 @@ class ViewDatapoints(Base):
             ModuleVersion.endreleaseid,
             release_id=None,
         )
-        return pd.read_sql(
+        return _read_sql_with_connection(
             _compile_query_for_pandas(query.statement, session),
-            session.connection().connection,
+            session,
         )
 
     @classmethod
@@ -2432,9 +2458,9 @@ class ViewDatapoints(Base):
         query = filter_by_release(
             query, ModuleVersion.startreleaseid, ModuleVersion.endreleaseid, release_id
         )
-        return pd.read_sql(
+        return _read_sql_with_connection(
             _compile_query_for_pandas(query.statement, session),
-            session.connection().connection,
+            session,
         )
 
     @classmethod
@@ -2460,9 +2486,9 @@ class ViewDatapoints(Base):
         ).distinct()
 
         query = query.filter(TableVersion.tablevid == table_version_id)
-        return pd.read_sql(
+        return _read_sql_with_connection(
             _compile_query_for_pandas(query.statement, session),
-            session.connection().connection,
+            session,
         )
 
     @classmethod
@@ -2494,9 +2520,9 @@ class ViewDatapoints(Base):
         query = filter_by_release(
             query, ModuleVersion.startreleaseid, ModuleVersion.endreleaseid, release_id
         )
-        return pd.read_sql(
+        return _read_sql_with_connection(
             _compile_query_for_pandas(query.statement, session),
-            session.connection().connection,
+            session,
         )
 
     @classmethod
@@ -2522,9 +2548,9 @@ class ViewDatapoints(Base):
         query = filter_by_release(
             query, ModuleVersion.startreleaseid, ModuleVersion.endreleaseid, release_id
         )
-        return pd.read_sql(
+        return _read_sql_with_connection(
             _compile_query_for_pandas(query.statement, session),
-            session.connection().connection,
+            session,
         )
 
 
@@ -2614,9 +2640,9 @@ class ViewKeyComponents(Base):
         query = query.distinct()
 
         # Execute and return as DataFrame
-        data = pd.read_sql(
+        data = _read_sql_with_connection(
             _compile_query_for_pandas(query.statement, session),
-            session.connection().connection,
+            session,
         )
         return data
 
@@ -2644,9 +2670,9 @@ class ViewKeyComponents(Base):
         # Add DISTINCT to eliminate duplicate rows from joins with composite PK tables
         query = query.distinct()
 
-        data = pd.read_sql(
+        data = _read_sql_with_connection(
             _compile_query_for_pandas(query.statement, session),
-            session.connection().connection,
+            session,
         )
         return data
 
@@ -2665,9 +2691,9 @@ class ViewKeyComponents(Base):
         # Add DISTINCT to eliminate duplicate rows from joins with composite PK tables
         query = query.distinct()
 
-        return pd.read_sql(
+        return _read_sql_with_connection(
             _compile_query_for_pandas(query.statement, session),
-            session.connection().connection,
+            session,
         )
 
 
@@ -2737,9 +2763,9 @@ class ViewOpenKeys(Base):
         # ItemCategory has composite PK (itemid, startreleaseid), so join on itemid creates duplicates
         query = query.distinct()
 
-        data = pd.read_sql(
+        data = _read_sql_with_connection(
             _compile_query_for_pandas(query.statement, session),
-            session.connection().connection,
+            session,
         )
         return data
 
@@ -2758,9 +2784,9 @@ class ViewOpenKeys(Base):
         # Add DISTINCT to eliminate duplicate rows from joins with composite PK tables
         query = query.distinct()
 
-        data = pd.read_sql(
+        data = _read_sql_with_connection(
             _compile_query_for_pandas(query.statement, session),
-            session.connection().connection,
+            session,
         )
         return data
 
@@ -2788,9 +2814,9 @@ class ViewDataTypes(Base):
             )
             query = query.filter(cls.datapoint.in_(datapoints_batch))
             results.append(
-                pd.read_sql(
+                _read_sql_with_connection(
                     _compile_query_for_pandas(query.statement, session),
-                    session.connection().connection,
+                    session,
                 )
             )
             batch_start += batch_size
@@ -2826,9 +2852,9 @@ class ViewSubcategoryItemInfo(Base):
             query, cls.start_release_id, cls.end_release_id, release_id
         )
         query = query.order_by(cls.ordering)
-        data = pd.read_sql(
+        data = _read_sql_with_connection(
             _compile_query_for_pandas(query.statement, session),
-            session.connection().connection,
+            session,
         )
         return data
 
@@ -2857,9 +2883,9 @@ class ViewHierarchyVariables(Base):
         query = filter_by_release(
             query, cls.start_release_id, cls.end_release_id, release_id
        )
-        data = pd.read_sql(
+        data = _read_sql_with_connection(
             _compile_query_for_pandas(query.statement, session),
-            session.connection().connection,
+            session,
         )
         return data
 
@@ -2888,9 +2914,9 @@ class ViewHierarchyVariablesContext(Base):
             query, cls.start_release_id, cls.end_release_id, release_id
         )
         results.append(
-            pd.read_sql(
+            _read_sql_with_connection(
                 _compile_query_for_pandas(query.statement, session),
-                session.connection().connection,
+                session,
             )
         )
         data = pd.concat(results, axis=0)
@@ -2909,9 +2935,9 @@ class ViewHierarchyPreconditions(Base):
     @classmethod
     def get_preconditions(cls, session):
         query = session.query(cls)
-        return pd.read_sql(
+        return _read_sql_with_connection(
             _compile_query_for_pandas(query.statement, session),
-            session.connection().connection,
+            session,
         )
 
 
@@ -2928,9 +2954,9 @@ class ViewOperations(Base):
     @classmethod
     def get_operations(cls, session):
         query = session.query(cls).distinct()
-        operations = pd.read_sql(
+        operations = _read_sql_with_connection(
             _compile_query_for_pandas(query.statement, session),
-            session.connection().connection,
+            session,
         )
         return operations.to_dict(orient="records")
 
@@ -2963,9 +2989,9 @@ class ViewOperations(Base):
         query = session.query(cls)
 
         query = query.filter(cls.operation_version_id.in_(preconditions_ids)).distinct()
-        preconditions = pd.read_sql(
+        preconditions = _read_sql_with_connection(
             _compile_query_for_pandas(query.statement, session),
-            session.connection().connection,
+            session,
         )
 
         return preconditions.to_dict(orient="records")
@@ -2982,9 +3008,9 @@ class ViewModules(Base):
     @classmethod
     def get_all_modules(cls, session):
         query = session.query(cls.module_code, cls.table_code).distinct()
-        return pd.read_sql(
+        return _read_sql_with_connection(
             _compile_query_for_pandas(query.statement, session),
-            session.connection().connection,
+            session,
         )
 
     @classmethod
@@ -3025,9 +3051,9 @@ class ViewOperationFromModule(Base):
         )
         query = query.filter(cls.module_code == module_code)
         query = filter_by_date(query, cls.from_date, cls.to_date, ref_date)
-        return pd.read_sql(
+        return _read_sql_with_connection(
             _compile_query_for_pandas(query.statement, session),
-            session.connection().connection,
+            session,
         ).to_dict(orient="records")
 
     @classmethod
@@ -3038,9 +3064,9 @@ class ViewOperationFromModule(Base):
         query = query.filter(
             cls.operation_version_id == operation_version_id
         ).distinct()
-        return pd.read_sql(
+        return _read_sql_with_connection(
             _compile_query_for_pandas(query.statement, session),
-            session.connection().connection,
+            session,
         ).to_dict(orient="records")
 
     @classmethod
@@ -3059,9 +3085,9 @@ class ViewOperationFromModule(Base):
             cls.severity,
         )
         query = query.filter(cls.module_version_id == module_version_id).distinct()
-        reference = pd.read_sql(
+        reference = _read_sql_with_connection(
             _compile_query_for_pandas(query.statement, session),
-            session.connection().connection,
+            session,
         )
         not_errors = []
         preconditions_to_remove = []
@@ -3069,9 +3095,7 @@ class ViewOperationFromModule(Base):
             not_errors = session.query(
                 OperationNode.nodeid.label("operation_version_id")
             ).distinct()
-            not_errors = pd.read_sql_query(
-                not_errors.statement, session.connection().connection
-            )
+            not_errors = pd.read_sql_query(not_errors.statement, session.get_bind())
             not_errors = list(not_errors["operation_version_id"])
             reference = reference[reference["operation_version_id"].isin(not_errors)]
         if not with_preconditions:
@@ -3079,7 +3103,7 @@ class ViewOperationFromModule(Base):
                 ViewPreconditionInfo.operation_version_id
             ).distinct()
             preconditions = pd.read_sql_query(
-                preconditions.statement, session.connection().connection
+                preconditions.statement, session.get_bind()
             )
             preconditions_to_remove = list(preconditions["operation_version_id"])
             reference = reference[
@@ -3117,9 +3141,9 @@ class ViewOperationInfo(Base):
         query = session.query(cls).filter(
             cls.operation_version_id == operation_version_id
         )
-        return pd.read_sql(
+        return _read_sql_with_connection(
             _compile_query_for_pandas(query.statement, session),
-            session.connection().connection,
+            session,
         ).to_dict(orient="records")
 
     @classmethod
@@ -3145,9 +3169,9 @@ class ViewOperationInfo(Base):
         query = session.query(cls).filter(
             cls.operation_version_id.in_(operation_version_ids)
         )
-        df = pd.read_sql(
+        df = _read_sql_with_connection(
             _compile_query_for_pandas(query.statement, session),
-            session.connection().connection,
+            session,
         )
         df = df.rename(columns=rename_dict)
         return df
@@ -3171,9 +3195,9 @@ class ViewTableInfo(Base):
             .filter(cls.module_code == module_code)
             .distinct()
         )
-        return pd.read_sql(
+        return _read_sql_with_connection(
             _compile_query_for_pandas(query.statement, session),
-            session.connection().connection,
+            session,
         ).to_dict(orient="records")
 
     @classmethod
@@ -3183,9 +3207,9 @@ class ViewTableInfo(Base):
             .filter(cls.module_version_id == module_version_id)
             .distinct()
         )
-        return pd.read_sql(
+        return _read_sql_with_connection(
             _compile_query_for_pandas(query.statement, session),
-            session.connection().connection,
+            session,
         ).to_dict(orient="records")
 
     @classmethod
@@ -3193,9 +3217,9 @@ class ViewTableInfo(Base):
         query = session.query(cls.variable_id, cls.variable_version_id).filter(
             cls.table_code == table_code
         )
-        data = pd.read_sql(
+        data = _read_sql_with_connection(
             _compile_query_for_pandas(query.statement, session),
-            session.connection().connection,
+            session,
         )
         if to_dict:
             return data.to_dict(orient="records")
@@ -3206,17 +3230,17 @@ class ViewTableInfo(Base):
         query = session.query(cls.variable_id, cls.variable_version_id).filter(
             cls.table_version_id == table_version_id
         )
-        return pd.read_sql(
+        return _read_sql_with_connection(
             _compile_query_for_pandas(query.statement, session),
-            session.connection().connection,
+            session,
         ).to_dict(orient="records")
 
     @classmethod
     def get_intra_module_variables(cls, session):
         query = session.query(cls.variable_version_id, cls.module_code).distinct()
-        module_data = pd.read_sql(
+        module_data = _read_sql_with_connection(
             _compile_query_for_pandas(query.statement, session),
-            session.connection().connection,
+            session,
         )
         intra_module_data = module_data.drop_duplicates(
             subset=["variable_version_id"], keep=False, ignore_index=True
@@ -3235,9 +3259,9 @@ class ViewTableInfo(Base):
             .distinct()
             .filter(cls.table_code.in_(table_codes))
         )
-        module_data = pd.read_sql(
+        module_data = _read_sql_with_connection(
             _compile_query_for_pandas(query.statement, session),
-            session.connection().connection,
+            session,
         )
 
         all_combinations = (
@@ -3277,9 +3301,9 @@ class ViewPreconditionInfo(Base):
         query = session.query(
             cls.operation_version_id, cls.operation_code, cls.variable_code
         ).distinct()
-        return pd.read_sql(
+        return _read_sql_with_connection(
             _compile_query_for_pandas(query.statement, session),
-            session.connection().connection,
+            session,
         ).to_dict(orient="records")
 
     @classmethod
@@ -3307,9 +3331,9 @@ class ViewHierarchyOperandReferenceInfo(Base):
             .filter(cls.cell_id == cell_id)
             .distinct()
         )
-        operations = pd.read_sql(
+        operations = _read_sql_with_connection(
             _compile_query_for_pandas(query.statement, session),
-            session.connection().connection,
+            session,
         ).to_dict(orient="records")
         return operations
 
@@ -3318,9 +3342,9 @@ class ViewHierarchyOperandReferenceInfo(Base):
         query = session.query(cls).filter(cls.variable_id.in_(var_id_list))
         possible_op_codes = []
 
-        df = pd.read_sql(
+        df = _read_sql_with_connection(
             _compile_query_for_pandas(query.statement, session),
-            session.connection().connection,
+            session,
         )
         grouped_code = df.groupby("operation_code")
         for elto_k, elto_v in grouped_code.groups.items():
@@ -3348,8 +3372,8 @@ class ViewReportTypeOperandReferenceInfo(Base):
             .filter(cls.cell_id == cell_id)
             .distinct()
         )
-        operations = pd.read_sql(
+        operations = _read_sql_with_connection(
             _compile_query_for_pandas(query.statement, session),
-            session.connection().connection,
+            session,
         ).to_dict(orient="records")
         return operations
--- /dev/null
+++ b/py_dpm/dpm/queries/base.py
@@ -0,0 +1,100 @@
+import warnings
+import pandas as pd
+from typing import List, Dict, Any, Optional, Union
+from sqlalchemy.orm import Query, Session
+from sqlalchemy.sql import Select
+
+
+class BaseQuery:
+    """
+    Base wrapper for SQLAlchemy queries to provide unified output formatting.
+    """
+
+    def __init__(self, session: Session, query: Union[Query, Select]):
+        self.session = session
+        self._query = query
+
+    def filter(self, *criteria):
+        """Apply SQLAlchemy filters."""
+        # Check if it's a legacy ORM Query or composite Select
+        if hasattr(self._query, "filter"):
+            self._query = self._query.filter(*criteria)
+        else:
+            # For 1.4/2.0 style select() objects, use where if filter not avail, but distinct() usually returns Select
+            # SQLAlchemy Select objects usually have .where() or .filter() (which is an alias in 1.4+)
+            self._query = self._query.filter(*criteria)
+        return self
+
+    def apply(self, func, *args, **kwargs):
+        """
+        Apply a function that modifies the query.
+        The function must accept the BaseQuery (or its internal query) as first argument
+        and return a modified query object.
+        """
+        # We pass the internal query to the function, and expect a query back
+        # This allows filters to work on the raw SQLAlchemy object
+        self._query = func(self._query, *args, **kwargs)
+        return self
+
+    @property
+    def statement(self):
+        """Return the underlying SQL statement."""
+        if hasattr(self._query, "statement"):
+            return self._query.statement
+        return self._query
+
+    def _compile_for_pandas(self):
+        """Compile query for safe pandas execution."""
+        stmt = self.statement
+        # Compile with literal binds for pandas compatibility
+        return str(
+            stmt.compile(
+                dialect=self.session.get_bind().dialect,
+                compile_kwargs={"literal_binds": True},
+            )
+        )
+
+    def to_df(self) -> pd.DataFrame:
+        """Execute query and return as Pandas DataFrame."""
+        sql = self._compile_for_pandas()
+
+        # Suppress pandas/SQLAlchemy connection warnings
+        with warnings.catch_warnings():
+            warnings.filterwarnings(
+                "ignore",
+                message=".*pandas only supports SQLAlchemy.*",
+                category=UserWarning,
+            )
+            return pd.read_sql(sql, self.session.connection().connection)
+
+    def to_dict(self) -> List[Dict[str, Any]]:
+        """
+        Execute query and return as list of dictionaries.
+
+        Handles both ORM objects (using to_dict if available) and KeyedTuples.
+        """
+        # Execute the query
+        if isinstance(self._query, Query):
+            results = self._query.all()
+        else:
+            results = self.session.execute(self._query).all()
+
+        if not results:
+            return []
+
+        # If results are ORM objects with to_dict method
+        first = results[0]
+        if hasattr(first, "to_dict"):
+            return [r.to_dict() for r in results]
+
+        # If results are SQLAlchemy Rows/KeyedTuples
+        if hasattr(first, "_mapping"):
+            return [dict(r._mapping) for r in results]
+
+        # Fallback for older SQLAlchemy or simple tuples (try to map from query column descriptions if possible, but Row/KeyedTuple is standard)
+        # In modern SA, .all() returns Rows which behave like tuples but have _mapping
+        try:
+            return [dict(row) for row in results]
+        except (ValueError, TypeError):
+            # Scalar results?
+            return results
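A minimal consumption sketch for the wrapper (not part of the diff; assumes an open session and uses the Release model from py_dpm.dpm.models):

    from py_dpm.dpm.models import Release
    from py_dpm.dpm.queries.base import BaseQuery

    bq = BaseQuery(session, session.query(Release))
    bq.filter(Release.iscurrent == True)  # chainable; mutates the wrapped query in place
    df = bq.to_df()         # pandas DataFrame from the compiled statement
    records = bq.to_dict()  # list of dicts, via SerializationMixin.to_dict on ORM rows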
--- /dev/null
+++ b/py_dpm/dpm/queries/basic_objects.py
@@ -0,0 +1,33 @@
+from sqlalchemy.orm import Session
+from py_dpm.dpm.models import Release
+from py_dpm.dpm.queries.base import BaseQuery
+
+
+class ReleaseQuery:
+    """
+    Queries related to releases.
+    """
+
+    @staticmethod
+    def get_all_releases(session: Session) -> BaseQuery:
+        """
+        Fetch list of available releases.
+        """
+        q = session.query(Release).order_by(Release.date.desc())
+        return BaseQuery(session, q)
+
+    @staticmethod
+    def get_release_by_id(session: Session, release_id: int) -> BaseQuery:
+        """
+        Fetch release by id.
+        """
+        q = session.query(Release).filter(Release.releaseid == release_id)
+        return BaseQuery(session, q)
+
+    @staticmethod
+    def get_release_by_code(session: Session, release_code: str) -> BaseQuery:
+        """
+        Fetch release by code.
+        """
+        q = session.query(Release).filter(Release.code == release_code)
+        return BaseQuery(session, q)
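The query helpers return the BaseQuery wrapper rather than raw rows, so the caller picks the output shape (a sketch, not part of the diff; the release code value is illustrative):

    from py_dpm.dpm.queries.basic_objects import ReleaseQuery

    releases = ReleaseQuery.get_all_releases(session).to_dict()
    one_df = ReleaseQuery.get_release_by_code(session, "3.2").to_df()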