pytrilogy 0.0.3.95__py3-none-any.whl → 0.0.3.96__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of pytrilogy might be problematic.
- {pytrilogy-0.0.3.95.dist-info → pytrilogy-0.0.3.96.dist-info}/METADATA +30 -7
- {pytrilogy-0.0.3.95.dist-info → pytrilogy-0.0.3.96.dist-info}/RECORD +17 -17
- trilogy/__init__.py +1 -1
- trilogy/authoring/__init__.py +59 -45
- trilogy/core/graph_models.py +4 -4
- trilogy/core/statements/execute.py +2 -0
- trilogy/core/validation/common.py +2 -1
- trilogy/core/validation/concept.py +24 -21
- trilogy/core/validation/datasource.py +16 -14
- trilogy/core/validation/environment.py +4 -4
- trilogy/dialect/base.py +9 -1
- trilogy/dialect/metadata.py +233 -0
- trilogy/executor.py +33 -163
- trilogy/compiler.py +0 -0
- {pytrilogy-0.0.3.95.dist-info → pytrilogy-0.0.3.96.dist-info}/WHEEL +0 -0
- {pytrilogy-0.0.3.95.dist-info → pytrilogy-0.0.3.96.dist-info}/entry_points.txt +0 -0
- {pytrilogy-0.0.3.95.dist-info → pytrilogy-0.0.3.96.dist-info}/licenses/LICENSE.md +0 -0
- {pytrilogy-0.0.3.95.dist-info → pytrilogy-0.0.3.96.dist-info}/top_level.txt +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pytrilogy
-Version: 0.0.3.95
+Version: 0.0.3.96
 Summary: Declarative, typed query language that compiles to SQL.
 Home-page:
 Author:
@@ -136,11 +136,11 @@ Versus SQL, Trilogy aims to:

 | Backend | Status | Notes |
 |---------|--------|-------|
-| **BigQuery** |
-| **DuckDB** |
-| **Snowflake** |
-| **SQL Server** |
-| **Presto** |
+| **BigQuery** | Core | Full support |
+| **DuckDB** | Core | Full support |
+| **Snowflake** | Core | Full support |
+| **SQL Server** | Experimental | Limited testing |
+| **Presto** | Experimental | Limited testing |

 ## Examples

@@ -311,7 +311,30 @@ trilogy fmt <path to trilogy file>
 - [Public model repository](https://github.com/trilogydata/trilogy-public-models) - Great place for modeling examples
 - [Full documentation](https://trilogydata.dev/)

-##
+## Python API Integration
+
+### Root Imports
+
+Are stable and should be sufficient for executing code from Trilogy as text.
+
+```python
+from pytrilogy import Executor, Dialect
+```
+
+### Authoring Imports
+
+Are also stable, and should be used for cases which programatically generate Trilogy statements without a base text format
+or need to process/transform parsed code in more complicated ways.
+
+```python
+from pytrilogy.authoring import Concept, Function, ...
+```
+
+### Other Imports
+
+Are likely to be unstable. Open an issue if you need to take dependencies on other modules outside those two paths.
+
+## Trilogy Syntax Reference

 ### Import
 ```sql
@@ -1,14 +1,13 @@
-pytrilogy-0.0.3.
-trilogy/__init__.py,sha256=
-trilogy/compiler.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pytrilogy-0.0.3.96.dist-info/licenses/LICENSE.md,sha256=5ZRvtTyCCFwz1THxDTjAu3Lidds9WjPvvzgVwPSYNDo,1042
+trilogy/__init__.py,sha256=hBxtfxlbUvTLp_8FCY_-wDqJM7G2RJQ2jntfMf3a0PM,303
 trilogy/constants.py,sha256=eKb_EJvSqjN9tGbdVEViwdtwwh8fZ3-jpOEDqL71y70,1691
 trilogy/engine.py,sha256=3MiADf5MKcmxqiHBuRqiYdsXiLj7oitDfVvXvHrfjkA,2178
-trilogy/executor.py,sha256=
+trilogy/executor.py,sha256=YfSjuJ0FVm2gHnNgmUlXijWDTUFjqq9FNakWpeEYO48,15769
 trilogy/parser.py,sha256=o4cfk3j3yhUFoiDKq9ZX_GjBF3dKhDjXEwb63rcBkBM,293
 trilogy/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 trilogy/render.py,sha256=qQWwduymauOlB517UtM-VGbVe8Cswa4UJub5aGbSO6c,1512
 trilogy/utility.py,sha256=euQccZLKoYBz0LNg5tzLlvv2YHvXh9HArnYp1V3uXsM,763
-trilogy/authoring/__init__.py,sha256=
+trilogy/authoring/__init__.py,sha256=TABMOETSMERrWuyDLR0nK4ISlqR0yaqeXrmuOdrSvAY,3060
 trilogy/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 trilogy/core/constants.py,sha256=nizWYDCJQ1bigQMtkNIEMNTcN0NoEAXiIHLzpelxQ24,201
 trilogy/core/enums.py,sha256=EusAzz7o_YrWf64TLIED7MfziFOJk8EHM8se5W3nyJk,8644
@@ -17,7 +16,7 @@ trilogy/core/environment_helpers.py,sha256=TRlqVctqIRBxzfjRBmpQsAVoiCcsEKBhG1B6P
 trilogy/core/ergonomics.py,sha256=e-7gE29vPLFdg0_A1smQ7eOrUwKl5VYdxRSTddHweRA,1631
 trilogy/core/exceptions.py,sha256=0Lmc3awJYx94k6uifbHc-EIqlFGV6YrX0QIwP84D4a4,1150
 trilogy/core/functions.py,sha256=ESUWMRmwtavwCLl6z1NP9EFzWTJoXn3orTaaOSsj33Q,33093
-trilogy/core/graph_models.py,sha256=
+trilogy/core/graph_models.py,sha256=4EWFTHGfYd72zvS2HYoV6hm7nMC_VEd7vWr6txY-ig0,3400
 trilogy/core/internal.py,sha256=r9QagDB2GvpqlyD_I7VrsfbVfIk5mnok2znEbv72Aa4,2681
 trilogy/core/optimization.py,sha256=ojpn-p79lr03SSVQbbw74iPCyoYpDYBmj1dbZ3oXCjI,8860
 trilogy/core/query_processor.py,sha256=uqygDJqkjIH4vLP-lbGRgTN7rRcYEkr3KGqNimNw_80,20345
@@ -75,20 +74,21 @@ trilogy/core/statements/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG
 trilogy/core/statements/author.py,sha256=VFzylve72fw0tqMSP5Yiwp8--_r92b9zzX1XAdxuTYQ,15963
 trilogy/core/statements/build.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 trilogy/core/statements/common.py,sha256=VnVLULQg1TJLNUFzJaROT1tsf2ewk3IpuhvZaP36R6A,535
-trilogy/core/statements/execute.py,sha256=
+trilogy/core/statements/execute.py,sha256=kiwJcVeMa4wZR-xLfM2oYOJ9DeyJkP8An38WFyJxktM,2413
 trilogy/core/validation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-trilogy/core/validation/common.py,sha256=
-trilogy/core/validation/concept.py,sha256=
-trilogy/core/validation/datasource.py,sha256=
-trilogy/core/validation/environment.py,sha256=
+trilogy/core/validation/common.py,sha256=cVbDSowtLf2nl0-QVmNauAeLBBNFkSE5bRZtTHIzW20,3193
+trilogy/core/validation/concept.py,sha256=23wZYw_cGmTQuFvaRM-0T7M2b5ZwqjFMucfvfzyQxlc,4425
+trilogy/core/validation/datasource.py,sha256=HIk7iEKK99k0-WXosiIhedH-U2rDGYGdUMKEt1eMl1w,6394
+trilogy/core/validation/environment.py,sha256=yjSnEH893mTiW9o6YXBtXJWbGSR2kMQWjszDuECznLs,2784
 trilogy/dialect/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-trilogy/dialect/base.py,sha256=
+trilogy/dialect/base.py,sha256=m2a8azbI3AWfQz-VtSn84H5T-BqjF5PULH6BrgwZzok,49666
 trilogy/dialect/bigquery.py,sha256=XS3hpybeowgfrOrkycAigAF3NX2YUzTzfgE6f__2fT4,4316
 trilogy/dialect/common.py,sha256=tSthIZOXXRPQ4KeMKnDDsH7KlTmf2EVqigVtLyoc4zc,6071
 trilogy/dialect/config.py,sha256=olnyeVU5W5T6b9-dMeNAnvxuPlyc2uefb7FRME094Ec,3834
 trilogy/dialect/dataframe.py,sha256=RUbNgReEa9g3pL6H7fP9lPTrAij5pkqedpZ99D8_5AE,1522
 trilogy/dialect/duckdb.py,sha256=JoUvQ19WvgxoaJkGLM7DPXOd1H0394k3vBiblksQzOI,5631
 trilogy/dialect/enums.py,sha256=FRNYQ5-w-B6-X0yXKNU5g9GowsMlERFogTC5u2nxL_s,4740
+trilogy/dialect/metadata.py,sha256=Vt4-p82bD1ijqeoI2dagUVUbC-KgNNJ2MvDwQIa5mG8,7034
 trilogy/dialect/postgres.py,sha256=el2PKwfyvWGk5EZtLudqAH5ewLitY1sFHJiocBSyxyM,3393
 trilogy/dialect/presto.py,sha256=k1IaeilR3nzPC9Hp7jlAdzJ7TsuxB3LQTBQ28MYE7O8,3715
 trilogy/dialect/snowflake.py,sha256=T6_mKfhpDazB1xQxqFLS2AJwzwzBcPYY6_qxRnAtFBs,3326
@@ -116,8 +116,8 @@ trilogy/std/money.preql,sha256=XWwvAV3WxBsHX9zfptoYRnBigcfYwrYtBHXTME0xJuQ,2082
 trilogy/std/net.preql,sha256=WZCuvH87_rZntZiuGJMmBDMVKkdhTtxeHOkrXNwJ1EE,416
 trilogy/std/ranking.preql,sha256=LDoZrYyz4g3xsII9XwXfmstZD-_92i1Eox1UqkBIfi8,83
 trilogy/std/report.preql,sha256=LbV-XlHdfw0jgnQ8pV7acG95xrd1-p65fVpiIc-S7W4,202
-pytrilogy-0.0.3.
-pytrilogy-0.0.3.
-pytrilogy-0.0.3.
-pytrilogy-0.0.3.
-pytrilogy-0.0.3.
+pytrilogy-0.0.3.96.dist-info/METADATA,sha256=l4yiGzDzMYd4of8nPXBMBKyetasPk4yfIOyENWjEqcU,11023
+pytrilogy-0.0.3.96.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+pytrilogy-0.0.3.96.dist-info/entry_points.txt,sha256=ewBPU2vLnVexZVnB-NrVj-p3E-4vukg83Zk8A55Wp2w,56
+pytrilogy-0.0.3.96.dist-info/top_level.txt,sha256=cAy__NW_eMAa_yT9UnUNlZLFfxcg6eimUAZ184cdNiE,8
+pytrilogy-0.0.3.96.dist-info/RECORD,,
trilogy/__init__.py
CHANGED
trilogy/authoring/__init__.py
CHANGED
@@ -60,68 +60,82 @@ from trilogy.core.statements.author import (
     RowsetDerivationStatement,
     SelectItem,
     SelectStatement,
+    ShowCategory,
+    ShowStatement,
+    ValidateStatement,
 )
 from trilogy.parsing.common import arbitrary_to_concept, arg_to_datatype

 __all__ = [
-
-    "
-
-    "
-    "
-    "
-    "CaseElse",
-    "AggregateWrapper",
-    "WindowItem",
-    "WindowOrder",
-    "WindowType",
-    "WindowItemOrder",
-    "WindowItemOver",
-    "DataType",
-    "StructType",
-    "ArrayType",
-    "NumericType",
-    "Grain",
-    "RowsetDerivationStatement",
-    "MapType",
-    "ListWrapper",
+    # trilogy.constants
+    "DEFAULT_NAMESPACE",
+    # trilogy.core.enums
+    "BooleanOperator",
+    "ComparisonOperator",
+    "FunctionClass",
     "FunctionType",
+    "InfiniteFunctionArgs",
+    "Ordering",
+    "Purpose",
+    # trilogy.core.functions
     "FunctionFactory",
-
-    "
-    "
-    "
-    "
+    # trilogy.core.models.author
+    "AggregateWrapper",
+    "CaseElse",
+    "CaseWhen",
+    "Comparison",
+    "Concept",
     "ConceptRef",
+    "Conditional",
+    "FilterItem",
+    "Function",
+    "FunctionCallWrapper",
     "HavingClause",
     "MagicConstants",
     "Metadata",
+    "MultiSelectLineage",
     "OrderBy",
     "OrderItem",
     "Parenthetical",
+    "RowsetItem",
     "SubselectComparison",
-    "
-    "
-    "
-    "
-    "
-    "
-
-    "
-    "
-    "
-    "
-    "
-    "
-    "
+    "WhereClause",
+    "WindowItem",
+    "WindowItemOrder",
+    "WindowItemOver",
+    "WindowOrder",
+    "WindowType",
+    # trilogy.core.models.core
+    "ArrayType",
+    "DataType",
+    "ListWrapper",
+    "MapType",
+    "NumericType",
+    "StructType",
+    "TraitDataType",
+    # trilogy.core.models.datasource
+    "Address",
     "Datasource",
     "DatasourceMetadata",
-
-    "
-
+    # trilogy.core.models.environment
+    "Environment",
+    # trilogy.core.statements.author
+    "ConceptDeclarationStatement",
+    "ConceptTransform",
     "CopyStatement",
+    "Grain",
     "HasUUID",
     "ImportStatement",
-    "
-    "
+    "MultiSelectStatement",
+    "PersistStatement",
+    "RawSQLStatement",
+    "RowsetDerivationStatement",
+    "SelectItem",
+    "SelectStatement",
+    "ShowCategory",
+    "ShowStatement",
+    "ValidateStatement",
+    # trilogy.parsing.common
+    "arbitrary_to_concept",
+    "arg_to_datatype",
 ]
trilogy/core/graph_models.py
CHANGED
@@ -64,13 +64,13 @@ def datasource_to_node(input: BuildDatasource) -> str:


 class ReferenceGraph(nx.DiGraph):
-    def __init__(self, *args, **kwargs):
+    def __init__(self, *args, **kwargs) -> None:
         super().__init__(*args, **kwargs)
         self.concepts: dict[str, BuildConcept] = {}
         self.datasources: dict[str, BuildDatasource] = {}
         self.pseudonyms: set[tuple[str, str]] = set()

-    def copy(self):
+    def copy(self) -> "ReferenceGraph":
         g = ReferenceGraph()
         g.concepts = self.concepts.copy()
         g.datasources = self.datasources.copy()
@@ -83,7 +83,7 @@ class ReferenceGraph(nx.DiGraph):
         # g.add_edges_from(self.edges(data=True))
         return g

-    def remove_node(self, n):
+    def remove_node(self, n) -> None:
         if n in self.concepts:
             del self.concepts[n]
         if n in self.datasources:
@@ -98,7 +98,7 @@ class ReferenceGraph(nx.DiGraph):
         self.datasources[node_name] = attr["datasource"]
         super().add_node(node_name, **attr)

-    def add_datasource_node(self, node_name, datasource):
+    def add_datasource_node(self, node_name, datasource) -> None:
         self.datasources[node_name] = datasource
         super().add_node(node_name, datasource=datasource)

trilogy/core/validation/common.py
CHANGED

@@ -27,7 +27,8 @@ class ExpectationType(Enum):
 @dataclass
 class ValidationTest:
     check_type: ExpectationType
-
+    raw_query: ProcessedQuery | None = None
+    generated_query: str | None = None
     expected: str | None = None
     result: ModelValidationError | None = None
     ran: bool = True
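Based on the fields shown in the diff above, a ValidationTest now carries both the unrendered query and, once a dialect has rendered it, the generated SQL string. A minimal sketch of inspecting a deferred check in generate-only mode; the expectation label used here is hypothetical:

```python
from trilogy.core.validation.common import ExpectationType, ValidationTest

# A test produced without an executor: no SQL has been generated yet
# and nothing has been run against an engine.
pending = ValidationTest(
    check_type=ExpectationType.ROWCOUNT,
    raw_query=None,                  # would hold a ProcessedQuery in practice
    generated_query=None,
    expected="equal_max_order_id",   # hypothetical expectation label
    result=None,
    ran=False,
)

if not pending.ran and pending.generated_query is None:
    print("validation deferred until a dialect renders SQL or an executor runs it")
```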
trilogy/core/validation/concept.py
CHANGED

@@ -1,4 +1,4 @@
-from trilogy import Executor
+from trilogy import Environment, Executor
 from trilogy.core.enums import Derivation, Purpose
 from trilogy.core.exceptions import (
     ConceptModelValidationError,
@@ -12,64 +12,68 @@ from trilogy.core.validation.common import ExpectationType, ValidationTest, easy_query


 def validate_property_concept(
-    concept: BuildConcept,
+    concept: BuildConcept, exec: Executor | None = None
 ) -> list[ValidationTest]:
     return []


 def validate_key_concept(
     concept: BuildConcept,
+    env: Environment,
     build_env: BuildEnvironment,
-    exec: Executor,
-    generate_only: bool = False,
+    exec: Executor | None = None,
 ):
     results: list[ValidationTest] = []
-    seen = {}
+    seen: dict[str, int] = {}
     for datasource in build_env.datasources.values():
         if concept.address in [c.address for c in datasource.concepts]:
             assignment = [
                 x for x in datasource.columns if x.concept.address == concept.address
             ][0]
+            # if it's not a partial, skip it
+            if not assignment.is_complete:
+                continue
             type_query = easy_query(
                 concepts=[
                     # build_env.concepts[concept.address],
                     build_env.concepts[f"grain_check_{concept.safe_address}"],
                 ],
                 datasource=datasource,
-                env=
+                env=env,
                 limit=1,
             )
-
+            if exec:
+                type_sql = exec.generate_sql(type_query)[-1]

-
-
+                rows = exec.execute_raw_sql(type_sql).fetchall()
+                seen[datasource.name] = rows[0][0] if rows else 0
+            else:
                 results.append(
                     ValidationTest(
-
+                        raw_query=type_query,
                         check_type=ExpectationType.ROWCOUNT,
                         expected=f"equal_max_{concept.safe_address}",
                         result=None,
                         ran=False,
                     )
                 )
-
-
-    if generate_only:
+
+    if not exec:
         return results
-    max_seen = max([v for v in seen.values() if v is not None], default=0)
+    max_seen: int = max([v for v in seen.values() if v is not None], default=0)
     for datasource in build_env.datasources.values():
         if concept.address in [c.address for c in datasource.concepts]:
             assignment = [
                 x for x in datasource.columns if x.concept.address == concept.address
             ][0]
             err = None
-
+            datasource_count: int = seen.get(datasource.name, 0)
+            if datasource_count < max_seen and assignment.is_complete:
                 err = DatasourceModelValidationError(
                     f"Key concept {concept.address} is missing values in datasource {datasource.name} (max cardinality in data {max_seen}, datasource has {seen[datasource.name]} values) but is not marked as partial."
                 )
             results.append(
                 ValidationTest(
-                    query=None,
                     check_type=ExpectationType.ROWCOUNT,
                     expected=str(max_seen),
                     result=err,
@@ -96,7 +100,6 @@ def validate_datasources(
         return []
     return [
         ValidationTest(
-            query=None,
             check_type=ExpectationType.LOGICAL,
             expected=None,
             result=ConceptModelValidationError(
@@ -109,14 +112,14 @@

 def validate_concept(
     concept: BuildConcept,
+    env: Environment,
     build_env: BuildEnvironment,
-    exec: Executor,
-    generate_only: bool = False,
+    exec: Executor | None = None,
 ) -> list[ValidationTest]:
     base: list[ValidationTest] = []
     base += validate_datasources(concept, build_env)
     if concept.purpose == Purpose.PROPERTY:
-        base += validate_property_concept(concept
+        base += validate_property_concept(concept)
     elif concept.purpose == Purpose.KEY:
-        base += validate_key_concept(concept, build_env, exec
+        base += validate_key_concept(concept, env, build_env, exec)
     return base
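The key-concept check above collects a per-datasource count, then flags any datasource that is marked complete but falls short of the maximum observed cardinality. A standalone sketch of just that comparison, with made-up counts (not data from the package):

```python
# Sketch of the cardinality comparison validate_key_concept performs once
# counts have been collected; the datasource names and counts are invented.
seen: dict[str, int] = {"orders_raw": 1000, "orders_summary": 940}
is_complete = {"orders_raw": True, "orders_summary": True}  # not marked partial

max_seen = max([v for v in seen.values() if v is not None], default=0)
for name, count in seen.items():
    # A complete (non-partial) datasource with fewer distinct key values than
    # the observed maximum is what the validator reports as an error.
    if count < max_seen and is_complete[name]:
        print(f"{name}: expected {max_seen} key values, found {count}")
```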
trilogy/core/validation/datasource.py
CHANGED

@@ -2,7 +2,7 @@ from datetime import date, datetime
 from decimal import Decimal
 from typing import Any

-from trilogy import Executor
+from trilogy import Environment, Executor
 from trilogy.authoring import (
     ArrayType,
     DataType,
@@ -61,12 +61,12 @@ def type_check(

 def validate_datasource(
     datasource: BuildDatasource,
+    env: Environment,
     build_env: BuildEnvironment,
-    exec: Executor,
-    generate_only: bool = False,
+    exec: Executor | None = None,
 ) -> list[ValidationTest]:
     results: list[ValidationTest] = []
-    # we might have merged concepts, where both
+    # we might have merged concepts, where both will map out to the same
     unique_outputs = unique(
         [build_env.concepts[col.concept.address] for col in datasource.columns],
         "address",
@@ -74,18 +74,20 @@
     type_query = easy_query(
         concepts=unique_outputs,
         datasource=datasource,
-        env=
+        env=env,
         limit=100,
     )
-
+
     rows = []
-    if
+    if exec:
+        type_sql = exec.generate_sql(type_query)[-1]
         try:
             rows = exec.execute_raw_sql(type_sql).fetchall()
         except Exception as e:
             results.append(
                 ValidationTest(
-
+                    raw_query=type_query,
+                    generated_query=type_sql,
                     check_type=ExpectationType.LOGICAL,
                     expected="valid_sql",
                     result=DatasourceModelValidationError(
@@ -96,9 +98,10 @@
             )
             return results
         else:
+
             results.append(
                 ValidationTest(
-
+                    raw_query=type_query,
                     check_type=ExpectationType.LOGICAL,
                     expected="datatype_match",
                     result=None,
@@ -117,7 +120,6 @@
     cols_with_error = set()
     for row in rows:
         for col in datasource.columns:
-
             actual_address = build_env.concepts[col.concept.address].safe_address
             if actual_address in cols_with_error:
                 continue
@@ -140,7 +142,6 @@
     if failures:
         results.append(
             ValidationTest(
-                query=None,
                 check_type=ExpectationType.LOGICAL,
                 expected="datatype_match",
                 ran=True,
@@ -161,10 +162,10 @@
             operator=ComparisonOperator.GT,
         ),
     )
-    if
+    if not exec:
         results.append(
             ValidationTest(
-
+                raw_query=query,
                 check_type=ExpectationType.ROWCOUNT,
                 expected="0",
                 result=None,
@@ -179,7 +180,8 @@
     if rows:
         results.append(
             ValidationTest(
-                query
+                raw_query=query,
+                generated_query=sql,
                 check_type=ExpectationType.ROWCOUNT,
                 expected="0",
                 result=DatasourceModelValidationError(
trilogy/core/validation/environment.py
CHANGED

@@ -12,12 +12,12 @@ from trilogy.parsing.common import function_to_concept

 def validate_environment(
     env: Environment,
-    exec: Executor,
     scope: ValidationScope = ValidationScope.ALL,
     targets: list[str] | None = None,
-
+    exec: Executor | None = None,
 ) -> list[ValidationTest]:
     # avoid mutating the environment for validation
+    generate_only = exec is None
     env = env.duplicate()
     grain_check = function_to_concept(
         parent=Function(
@@ -51,13 +51,13 @@
         for datasource in build_env.datasources.values():
             if targets and datasource.name not in targets:
                 continue
-            results += validate_datasource(datasource, build_env, exec
+            results += validate_datasource(datasource, env, build_env, exec)
     if scope == ValidationScope.ALL or scope == ValidationScope.CONCEPTS:

         for bconcept in build_env.concepts.values():
             if targets and bconcept.address not in targets:
                 continue
-            results += validate_concept(bconcept, build_env, exec
+            results += validate_concept(bconcept, env, build_env, exec)

     # raise a nicely formatted union of all exceptions
     exceptions: list[ModelValidationError] = [e.result for e in results if e.result]
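After this change, passing no executor puts validate_environment into generate-only mode: checks are built and returned but never run. A hedged sketch of calling it with the signature shown above; it assumes an Environment can be constructed empty here for illustration, whereas a real call would use a populated model:

```python
from trilogy import Environment
from trilogy.core.enums import ValidationScope
from trilogy.core.validation.environment import validate_environment

# Assumption: an empty Environment() stands in for a real, populated model.
env = Environment()

# With exec=None the checks are only generated, not executed: each returned
# ValidationTest has ran=False and carries the query it would have run.
tests = validate_environment(env, scope=ValidationScope.ALL, targets=None, exec=None)
for test in tests:
    print(test.check_type, test.ran)
```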
trilogy/dialect/base.py
CHANGED
@@ -76,6 +76,7 @@ from trilogy.core.statements.author import (
 )
 from trilogy.core.statements.execute import (
     PROCESSED_STATEMENT_TYPES,
+    ProcessedCopyStatement,
     ProcessedQuery,
     ProcessedQueryPersist,
     ProcessedRawSQLStatement,
@@ -345,6 +346,7 @@ class BaseDialect:
     COMPLEX_DATATYPE_MAP = COMPLEX_DATATYPE_MAP
     UNNEST_MODE = UnnestMode.CROSS_APPLY
     GROUP_MODE = GroupMode.AUTO
+    EXPLAIN_KEYWORD = "EXPLAIN"

     def __init__(self, rendering: Rendering | None = None):
         self.rendering = rendering or CONFIG.rendering
@@ -1135,7 +1137,13 @@ class BaseDialect:
         query: PROCESSED_STATEMENT_TYPES,
     ) -> str:
         if isinstance(query, ProcessedShowStatement):
-            return ";\n".join(
+            return ";\n".join(
+                [
+                    f'{self.EXPLAIN_KEYWORD} {self.compile_statement(x)}'
+                    for x in query.output_values
+                    if isinstance(x, (ProcessedQuery, ProcessedCopyStatement))
+                ]
+            )
         elif isinstance(query, ProcessedRawSQLStatement):
             return query.text

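The new EXPLAIN_KEYWORD class attribute lets a dialect change how show statements are prefixed while BaseDialect keeps the join-and-prefix logic. A sketch of an override; the subclass name and keyword choice are illustrative, not part of the package:

```python
from trilogy.dialect.base import BaseDialect


class ExampleDialect(BaseDialect):
    # Hypothetical dialect whose engine spells the plan command differently;
    # only the keyword changes, the rendering logic stays in BaseDialect.
    EXPLAIN_KEYWORD = "EXPLAIN ANALYZE"


# BaseDialect would then render a show statement roughly as:
#   "EXPLAIN ANALYZE <compiled SQL 1>;\nEXPLAIN ANALYZE <compiled SQL 2>"
```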
trilogy/dialect/metadata.py
ADDED

@@ -0,0 +1,233 @@
+from dataclasses import dataclass
+from typing import Any, List, Optional
+
+from trilogy.core.models.author import ConceptRef
+from trilogy.core.models.datasource import Datasource
+from trilogy.core.models.environment import Environment
+from trilogy.core.statements.author import (
+    ConceptDeclarationStatement,
+    ImportStatement,
+    MergeStatementV2,
+)
+from trilogy.core.statements.execute import (
+    ProcessedShowStatement,
+    ProcessedStaticValueOutput,
+    ProcessedValidateStatement,
+)
+from trilogy.core.validation.common import ValidationTest
+from trilogy.dialect.base import BaseDialect
+from trilogy.engine import ResultProtocol
+
+
+@dataclass
+class MockResult(ResultProtocol):
+    values: list["MockResultRow"]
+    columns: list[str]
+
+    def __init__(self, values: list[Any], columns: list[str]):
+        processed: list[MockResultRow] = []
+        for x in values:
+            if isinstance(x, dict):
+                processed.append(MockResultRow(x))
+            elif isinstance(x, MockResultRow):
+                processed.append(x)
+            else:
+                raise ValueError(
+                    f"Cannot process value of type {type(x)} in MockResult"
+                )
+        self.columns = columns
+        self.values = processed
+
+    def __iter__(self):
+        while self.values:
+            yield self.values.pop(0)
+
+    def fetchall(self):
+        return self.values
+
+    def fetchone(self):
+        if self.values:
+            return self.values.pop(0)
+        return None
+
+    def fetchmany(self, size: int):
+        rval = self.values[:size]
+        self.values = self.values[size:]
+        return rval
+
+    def keys(self):
+        return self.columns
+
+    def as_dict(self):
+        return [x.as_dict() if isinstance(x, MockResultRow) else x for x in self.values]
+
+
+@dataclass
+class MockResultRow:
+    _values: dict[str, Any]
+
+    def as_dict(self):
+        return self._values
+
+    def __str__(self) -> str:
+        return str(self._values)
+
+    def __repr__(self) -> str:
+        return repr(self._values)
+
+    def __getattr__(self, name: str) -> Any:
+        if name in self._values:
+            return self._values[name]
+        return super().__getattribute__(name)
+
+    def __getitem__(self, key: str) -> Any:
+        return self._values[key]
+
+    def values(self):
+        return self._values.values()
+
+    def keys(self):
+        return self._values.keys()
+
+
+def generate_result_set(
+    columns: List[ConceptRef], output_data: list[Any]
+) -> MockResult:
+    """Generate a mock result set from columns and output data."""
+    names = [x.address.replace(".", "_") for x in columns]
+    return MockResult(
+        values=[dict(zip(names, [row])) for row in output_data], columns=names
+    )
+
+
+def handle_concept_declaration(query: ConceptDeclarationStatement) -> MockResult:
+    """Handle concept declaration statements without execution."""
+    concept = query.concept
+    return MockResult(
+        [
+            {
+                "address": concept.address,
+                "type": concept.datatype.value,
+                "purpose": concept.purpose.value,
+                "derivation": concept.derivation.value,
+            }
+        ],
+        ["address", "type", "purpose", "derivation"],
+    )
+
+
+def handle_datasource(query: Datasource) -> MockResult:
+    """Handle datasource queries without execution."""
+    return MockResult(
+        [
+            {
+                "name": query.name,
+            }
+        ],
+        ["name"],
+    )
+
+
+def handle_import_statement(query: ImportStatement) -> MockResult:
+    """Handle import statements without execution."""
+    return MockResult(
+        [
+            {
+                "path": query.path,
+                "alias": query.alias,
+            }
+        ],
+        ["path", "alias"],
+    )
+
+
+def handle_merge_statement(
+    query: MergeStatementV2, environment: Environment
+) -> MockResult:
+    """Handle merge statements by updating environment and returning result."""
+    for concept in query.sources:
+        environment.merge_concept(
+            concept, query.targets[concept.address], modifiers=query.modifiers
+        )
+
+    return MockResult(
+        [
+            {
+                "sources": ",".join([x.address for x in query.sources]),
+                "targets": ",".join([x.address for _, x in query.targets.items()]),
+            }
+        ],
+        ["source", "target"],
+    )
+
+
+def handle_processed_show_statement(
+    query: ProcessedShowStatement, compiled_statements: list[str]
+) -> MockResult:
+    """Handle processed show statements without execution."""
+
+    return generate_result_set(query.output_columns, compiled_statements)
+
+
+def raw_validation_to_result(
+    raw: list[ValidationTest], generator: Optional[BaseDialect] = None
+) -> Optional[MockResult]:
+    """Convert raw validation tests to mock result."""
+    if not raw:
+        return None
+    output = []
+    for row in raw:
+        if row.raw_query and generator and not row.generated_query:
+            try:
+                row.generated_query = generator.compile_statement(row.raw_query)
+            except Exception as e:
+                row.generated_query = f"Error generating query: {e}"
+        output.append(
+            {
+                "check_type": row.check_type.value,
+                "expected": row.expected,
+                "result": str(row.result) if row.result else None,
+                "ran": row.ran,
+                "query": row.generated_query if row.generated_query else "",
+            }
+        )
+    return MockResult(output, ["check_type", "expected", "result", "ran", "query"])
+
+
+def handle_processed_validate_statement(
+    query: ProcessedValidateStatement, dialect: BaseDialect, validate_environment_func
+) -> Optional[MockResult]:
+    """Handle processed validate statements."""
+    results = validate_environment_func(query.scope, query.targets)
+    return raw_validation_to_result(results, dialect)
+
+
+def handle_show_statement_outputs(
+    statement: ProcessedShowStatement,
+    compiled_statements: list[str],
+    environment: Environment,
+    dialect: BaseDialect,
+) -> list[MockResult]:
+    """Handle show statement outputs without execution."""
+    output = []
+    for x in statement.output_values:
+        if isinstance(x, ProcessedStaticValueOutput):
+            output.append(generate_result_set(statement.output_columns, x.values))
+        elif compiled_statements:
+
+            output.append(
+                generate_result_set(
+                    statement.output_columns,
+                    compiled_statements,
+                )
+            )
+        elif isinstance(x, ProcessedValidateStatement):
+            from trilogy.core.validation.environment import validate_environment
+
+            raw = validate_environment(environment, x.scope, x.targets)
+            results = raw_validation_to_result(raw, dialect)
+            if results:
+                output.append(results)
+        else:
+            raise NotImplementedError(f"Cannot show type {type(x)} in show statement")
+    return output

trilogy/executor.py
CHANGED

@@ -1,4 +1,3 @@
-from dataclasses import dataclass
 from functools import singledispatchmethod
 from pathlib import Path
 from typing import Any, Generator, List, Optional
@@ -7,7 +6,7 @@ from sqlalchemy import text

 from trilogy.constants import MagicConstants, Rendering, logger
 from trilogy.core.enums import FunctionType, Granularity, IOType, ValidationScope
-from trilogy.core.models.author import Concept,
+from trilogy.core.models.author import Concept, Function
 from trilogy.core.models.build import BuildFunction
 from trilogy.core.models.core import ListWrapper, MapWrapper
 from trilogy.core.models.datasource import Datasource
@@ -31,7 +30,6 @@ from trilogy.core.statements.execute import (
     ProcessedQueryPersist,
     ProcessedRawSQLStatement,
     ProcessedShowStatement,
-    ProcessedStaticValueOutput,
     ProcessedValidateStatement,
 )
 from trilogy.core.validation.common import (
@@ -39,82 +37,22 @@ from trilogy.core.validation.common import (
 )
 from trilogy.dialect.base import BaseDialect
 from trilogy.dialect.enums import Dialects
+from trilogy.dialect.metadata import (
+    generate_result_set,
+    handle_concept_declaration,
+    handle_datasource,
+    handle_import_statement,
+    handle_merge_statement,
+    handle_processed_show_statement,
+    handle_processed_validate_statement,
+    handle_show_statement_outputs,
+)
 from trilogy.engine import ExecutionEngine, ResultProtocol
 from trilogy.hooks.base_hook import BaseHook
 from trilogy.parser import parse_text
 from trilogy.render import get_dialect_generator


-@dataclass
-class MockResult(ResultProtocol):
-    values: list[Any]
-    columns: list[str]
-
-    def __init__(self, values: list[Any], columns: list[str]):
-        processed = []
-        for x in values:
-            if isinstance(x, dict):
-                processed.append(MockResultRow(x))
-            else:
-                processed.append(x)
-        self.columns = columns
-        self.values = processed
-
-    def __iter__(self):
-        while self.values:
-            yield self.values.pop(0)
-
-    def fetchall(self):
-        return self.values
-
-    def fetchone(self):
-        if self.values:
-            return self.values.pop(0)
-        return None
-
-    def fetchmany(self, size: int):
-        rval = self.values[:size]
-        self.values = self.values[size:]
-        return rval
-
-    def keys(self):
-        return self.columns
-
-
-@dataclass
-class MockResultRow:
-    _values: dict[str, Any]
-
-    def __str__(self) -> str:
-        return str(self._values)
-
-    def __repr__(self) -> str:
-        return repr(self._values)
-
-    def __getattr__(self, name: str) -> Any:
-        if name in self._values:
-            return self._values[name]
-        return super().__getattribute__(name)
-
-    def __getitem__(self, key: str) -> Any:
-        return self._values[key]
-
-    def values(self):
-        return self._values.values()
-
-    def keys(self):
-        return self._values.keys()
-
-
-def generate_result_set(
-    columns: List[ConceptRef], output_data: list[Any]
-) -> MockResult:
-    names = [x.address.replace(".", "_") for x in columns]
-    return MockResult(
-        values=[dict(zip(names, [row])) for row in output_data], columns=names
-    )
-
-
 class Executor(object):
     def __init__(
         self,
@@ -150,29 +88,11 @@

     @execute_query.register
     def _(self, query: ConceptDeclarationStatement) -> ResultProtocol | None:
-
-        return MockResult(
-            [
-                {
-                    "address": concept.address,
-                    "type": concept.datatype.value,
-                    "purpose": concept.purpose.value,
-                    "derivation": concept.derivation.value,
-                }
-            ],
-            ["address", "type", "purpose", "derivation"],
-        )
+        return handle_concept_declaration(query)

     @execute_query.register
     def _(self, query: Datasource) -> ResultProtocol | None:
-        return
-            [
-                {
-                    "name": query.name,
-                }
-            ],
-            ["name"],
-        )
+        return handle_datasource(query)

     @execute_query.register
     def _(self, query: str) -> ResultProtocol | None:
@@ -208,66 +128,28 @@

     @execute_query.register
     def _(self, query: ProcessedShowStatement) -> ResultProtocol | None:
-        return
-            query
+        return handle_processed_show_statement(
+            query,
             [
                 self.generator.compile_statement(x)
                 for x in query.output_values
-                if isinstance(x, ProcessedQuery)
+                if isinstance(x, (ProcessedQuery, ProcessedQueryPersist))
             ],
         )

-    def _raw_validation_to_result(
-        self, raw: list[ValidationTest]
-    ) -> Optional[ResultProtocol]:
-        if not raw:
-            return None
-        output = []
-        for row in raw:
-            output.append(
-                {
-                    "check_type": row.check_type.value,
-                    "expected": row.expected,
-                    "result": str(row.result) if row.result else None,
-                    "ran": row.ran,
-                    "query": row.query if row.query else "",
-                }
-            )
-        return MockResult(output, ["check_type", "expected", "result", "ran", "query"])
-
     @execute_query.register
     def _(self, query: ProcessedValidateStatement) -> ResultProtocol | None:
-
-
+        return handle_processed_validate_statement(
+            query, self.generator, self.validate_environment
+        )

     @execute_query.register
     def _(self, query: ImportStatement) -> ResultProtocol | None:
-        return
-            [
-                {
-                    "path": query.path,
-                    "alias": query.alias,
-                }
-            ],
-            ["path", "alias"],
-        )
+        return handle_import_statement(query)

     @execute_query.register
     def _(self, query: MergeStatementV2) -> ResultProtocol | None:
-
-            self.environment.merge_concept(
-                concept, query.targets[concept.address], modifiers=query.modifiers
-            )
-
-        return MockResult(
-            [
-                {
-                    "sources": ",".join([x.address for x in query.sources]),
-                    "targets": ",".join([x.address for _, x in query.targets.items()]),
-                }
-            ],
-            ["source", "target"],
-        )
+        return handle_merge_statement(query, self.environment)

     @execute_query.register
     def _(self, query: ProcessedRawSQLStatement) -> ResultProtocol | None:
@@ -516,29 +398,17 @@
         # connection = self.engine.connect()
         for statement in self.parse_text_generator(command):
             if isinstance(statement, ProcessedShowStatement):
-
-
-
-
-
-
-
-
-
-
-
-                    )
-                elif isinstance(x, ProcessedValidateStatement):
-                    raw = self.validate_environment(
-                        x.scope, x.targets, generate_only=True
-                    )
-                    results = self._raw_validation_to_result(raw)
-                    if results:
-                        output.append(results)
-                else:
-                    raise NotImplementedError(
-                        f"Cannot show type {type(x)} in show statement"
-                    )
+                results = handle_show_statement_outputs(
+                    statement,
+                    [
+                        self.generator.compile_statement(x)
+                        for x in statement.output_values
+                        if isinstance(x, (ProcessedQuery, ProcessedQueryPersist))
+                    ],
+                    self.environment,
+                    self.generator,
+                )
+                output.extend(results)
                 continue
             if non_interactive:
                 if not isinstance(
@@ -567,5 +437,5 @@
         from trilogy.core.validation.environment import validate_environment

         return validate_environment(
-            self.environment,
+            self.environment, scope, targets, exec=None if generate_only else self
         )
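The executor refactor above delegates metadata-style statements to the new trilogy/dialect/metadata.py handlers, which return MockResult objects rather than touching the engine. A short sketch of how that cursor stand-in behaves, matching the MockResult and MockResultRow definitions shown earlier; the column names and values here are arbitrary sample data:

```python
from trilogy.dialect.metadata import MockResult

# Sample data only; any dict rows with matching column names would do.
result = MockResult(
    values=[{"address": "local.order_id", "type": "int"}],
    columns=["address", "type"],
)

print(result.keys())             # ['address', 'type']
row = result.fetchone()          # a MockResultRow wrapping the dict
print(row["address"], row.type)  # dict-style and attribute-style access
print(result.fetchone())         # None: the single row has been consumed
```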
trilogy/compiler.py
DELETED
File without changes

{pytrilogy-0.0.3.95.dist-info → pytrilogy-0.0.3.96.dist-info}/WHEEL
File without changes
{pytrilogy-0.0.3.95.dist-info → pytrilogy-0.0.3.96.dist-info}/entry_points.txt
File without changes
{pytrilogy-0.0.3.95.dist-info → pytrilogy-0.0.3.96.dist-info}/licenses/LICENSE.md
File without changes
{pytrilogy-0.0.3.95.dist-info → pytrilogy-0.0.3.96.dist-info}/top_level.txt
File without changes