pytrilogy-0.3.149-cp313-cp313-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- LICENSE.md +19 -0
- _preql_import_resolver/__init__.py +5 -0
- _preql_import_resolver/_preql_import_resolver.cp313-win_amd64.pyd +0 -0
- pytrilogy-0.3.149.dist-info/METADATA +555 -0
- pytrilogy-0.3.149.dist-info/RECORD +207 -0
- pytrilogy-0.3.149.dist-info/WHEEL +4 -0
- pytrilogy-0.3.149.dist-info/entry_points.txt +2 -0
- pytrilogy-0.3.149.dist-info/licenses/LICENSE.md +19 -0
- trilogy/__init__.py +27 -0
- trilogy/ai/README.md +10 -0
- trilogy/ai/__init__.py +19 -0
- trilogy/ai/constants.py +92 -0
- trilogy/ai/conversation.py +107 -0
- trilogy/ai/enums.py +7 -0
- trilogy/ai/execute.py +50 -0
- trilogy/ai/models.py +34 -0
- trilogy/ai/prompts.py +100 -0
- trilogy/ai/providers/__init__.py +0 -0
- trilogy/ai/providers/anthropic.py +106 -0
- trilogy/ai/providers/base.py +24 -0
- trilogy/ai/providers/google.py +146 -0
- trilogy/ai/providers/openai.py +89 -0
- trilogy/ai/providers/utils.py +68 -0
- trilogy/authoring/README.md +3 -0
- trilogy/authoring/__init__.py +148 -0
- trilogy/constants.py +119 -0
- trilogy/core/README.md +52 -0
- trilogy/core/__init__.py +0 -0
- trilogy/core/constants.py +6 -0
- trilogy/core/enums.py +454 -0
- trilogy/core/env_processor.py +239 -0
- trilogy/core/environment_helpers.py +320 -0
- trilogy/core/ergonomics.py +193 -0
- trilogy/core/exceptions.py +123 -0
- trilogy/core/functions.py +1240 -0
- trilogy/core/graph_models.py +142 -0
- trilogy/core/internal.py +85 -0
- trilogy/core/models/__init__.py +0 -0
- trilogy/core/models/author.py +2670 -0
- trilogy/core/models/build.py +2603 -0
- trilogy/core/models/build_environment.py +165 -0
- trilogy/core/models/core.py +506 -0
- trilogy/core/models/datasource.py +436 -0
- trilogy/core/models/environment.py +756 -0
- trilogy/core/models/execute.py +1213 -0
- trilogy/core/optimization.py +251 -0
- trilogy/core/optimizations/__init__.py +12 -0
- trilogy/core/optimizations/base_optimization.py +17 -0
- trilogy/core/optimizations/hide_unused_concept.py +47 -0
- trilogy/core/optimizations/inline_datasource.py +102 -0
- trilogy/core/optimizations/predicate_pushdown.py +245 -0
- trilogy/core/processing/README.md +94 -0
- trilogy/core/processing/READMEv2.md +121 -0
- trilogy/core/processing/VIRTUAL_UNNEST.md +30 -0
- trilogy/core/processing/__init__.py +0 -0
- trilogy/core/processing/concept_strategies_v3.py +508 -0
- trilogy/core/processing/constants.py +15 -0
- trilogy/core/processing/discovery_node_factory.py +451 -0
- trilogy/core/processing/discovery_utility.py +548 -0
- trilogy/core/processing/discovery_validation.py +167 -0
- trilogy/core/processing/graph_utils.py +43 -0
- trilogy/core/processing/node_generators/README.md +9 -0
- trilogy/core/processing/node_generators/__init__.py +31 -0
- trilogy/core/processing/node_generators/basic_node.py +160 -0
- trilogy/core/processing/node_generators/common.py +270 -0
- trilogy/core/processing/node_generators/constant_node.py +38 -0
- trilogy/core/processing/node_generators/filter_node.py +315 -0
- trilogy/core/processing/node_generators/group_node.py +213 -0
- trilogy/core/processing/node_generators/group_to_node.py +117 -0
- trilogy/core/processing/node_generators/multiselect_node.py +207 -0
- trilogy/core/processing/node_generators/node_merge_node.py +695 -0
- trilogy/core/processing/node_generators/recursive_node.py +88 -0
- trilogy/core/processing/node_generators/rowset_node.py +165 -0
- trilogy/core/processing/node_generators/select_helpers/__init__.py +0 -0
- trilogy/core/processing/node_generators/select_helpers/datasource_injection.py +261 -0
- trilogy/core/processing/node_generators/select_merge_node.py +846 -0
- trilogy/core/processing/node_generators/select_node.py +95 -0
- trilogy/core/processing/node_generators/synonym_node.py +98 -0
- trilogy/core/processing/node_generators/union_node.py +91 -0
- trilogy/core/processing/node_generators/unnest_node.py +182 -0
- trilogy/core/processing/node_generators/window_node.py +201 -0
- trilogy/core/processing/nodes/README.md +28 -0
- trilogy/core/processing/nodes/__init__.py +179 -0
- trilogy/core/processing/nodes/base_node.py +522 -0
- trilogy/core/processing/nodes/filter_node.py +75 -0
- trilogy/core/processing/nodes/group_node.py +194 -0
- trilogy/core/processing/nodes/merge_node.py +420 -0
- trilogy/core/processing/nodes/recursive_node.py +46 -0
- trilogy/core/processing/nodes/select_node_v2.py +242 -0
- trilogy/core/processing/nodes/union_node.py +53 -0
- trilogy/core/processing/nodes/unnest_node.py +62 -0
- trilogy/core/processing/nodes/window_node.py +56 -0
- trilogy/core/processing/utility.py +823 -0
- trilogy/core/query_processor.py +604 -0
- trilogy/core/statements/README.md +35 -0
- trilogy/core/statements/__init__.py +0 -0
- trilogy/core/statements/author.py +536 -0
- trilogy/core/statements/build.py +0 -0
- trilogy/core/statements/common.py +20 -0
- trilogy/core/statements/execute.py +155 -0
- trilogy/core/table_processor.py +66 -0
- trilogy/core/utility.py +8 -0
- trilogy/core/validation/README.md +46 -0
- trilogy/core/validation/__init__.py +0 -0
- trilogy/core/validation/common.py +161 -0
- trilogy/core/validation/concept.py +146 -0
- trilogy/core/validation/datasource.py +227 -0
- trilogy/core/validation/environment.py +73 -0
- trilogy/core/validation/fix.py +256 -0
- trilogy/dialect/__init__.py +32 -0
- trilogy/dialect/base.py +1432 -0
- trilogy/dialect/bigquery.py +314 -0
- trilogy/dialect/common.py +147 -0
- trilogy/dialect/config.py +159 -0
- trilogy/dialect/dataframe.py +50 -0
- trilogy/dialect/duckdb.py +397 -0
- trilogy/dialect/enums.py +151 -0
- trilogy/dialect/metadata.py +173 -0
- trilogy/dialect/mock.py +190 -0
- trilogy/dialect/postgres.py +117 -0
- trilogy/dialect/presto.py +110 -0
- trilogy/dialect/results.py +89 -0
- trilogy/dialect/snowflake.py +129 -0
- trilogy/dialect/sql_server.py +137 -0
- trilogy/engine.py +48 -0
- trilogy/execution/__init__.py +17 -0
- trilogy/execution/config.py +119 -0
- trilogy/execution/state/__init__.py +0 -0
- trilogy/execution/state/exceptions.py +26 -0
- trilogy/execution/state/file_state_store.py +0 -0
- trilogy/execution/state/sqllite_state_store.py +0 -0
- trilogy/execution/state/state_store.py +406 -0
- trilogy/executor.py +692 -0
- trilogy/hooks/__init__.py +4 -0
- trilogy/hooks/base_hook.py +40 -0
- trilogy/hooks/graph_hook.py +135 -0
- trilogy/hooks/query_debugger.py +166 -0
- trilogy/metadata/__init__.py +0 -0
- trilogy/parser.py +10 -0
- trilogy/parsing/README.md +21 -0
- trilogy/parsing/__init__.py +0 -0
- trilogy/parsing/common.py +1069 -0
- trilogy/parsing/config.py +5 -0
- trilogy/parsing/exceptions.py +8 -0
- trilogy/parsing/helpers.py +1 -0
- trilogy/parsing/parse_engine.py +2876 -0
- trilogy/parsing/render.py +775 -0
- trilogy/parsing/trilogy.lark +546 -0
- trilogy/py.typed +0 -0
- trilogy/render.py +45 -0
- trilogy/scripts/README.md +9 -0
- trilogy/scripts/__init__.py +0 -0
- trilogy/scripts/agent.py +41 -0
- trilogy/scripts/agent_info.py +306 -0
- trilogy/scripts/common.py +432 -0
- trilogy/scripts/dependency/Cargo.lock +617 -0
- trilogy/scripts/dependency/Cargo.toml +39 -0
- trilogy/scripts/dependency/README.md +131 -0
- trilogy/scripts/dependency/build.sh +25 -0
- trilogy/scripts/dependency/src/directory_resolver.rs +387 -0
- trilogy/scripts/dependency/src/lib.rs +16 -0
- trilogy/scripts/dependency/src/main.rs +770 -0
- trilogy/scripts/dependency/src/parser.rs +435 -0
- trilogy/scripts/dependency/src/preql.pest +208 -0
- trilogy/scripts/dependency/src/python_bindings.rs +311 -0
- trilogy/scripts/dependency/src/resolver.rs +716 -0
- trilogy/scripts/dependency/tests/base.preql +3 -0
- trilogy/scripts/dependency/tests/cli_integration.rs +377 -0
- trilogy/scripts/dependency/tests/customer.preql +6 -0
- trilogy/scripts/dependency/tests/main.preql +9 -0
- trilogy/scripts/dependency/tests/orders.preql +7 -0
- trilogy/scripts/dependency/tests/test_data/base.preql +9 -0
- trilogy/scripts/dependency/tests/test_data/consumer.preql +1 -0
- trilogy/scripts/dependency.py +323 -0
- trilogy/scripts/display.py +555 -0
- trilogy/scripts/environment.py +59 -0
- trilogy/scripts/fmt.py +32 -0
- trilogy/scripts/ingest.py +487 -0
- trilogy/scripts/ingest_helpers/__init__.py +1 -0
- trilogy/scripts/ingest_helpers/foreign_keys.py +123 -0
- trilogy/scripts/ingest_helpers/formatting.py +93 -0
- trilogy/scripts/ingest_helpers/typing.py +161 -0
- trilogy/scripts/init.py +105 -0
- trilogy/scripts/parallel_execution.py +762 -0
- trilogy/scripts/plan.py +189 -0
- trilogy/scripts/refresh.py +161 -0
- trilogy/scripts/run.py +79 -0
- trilogy/scripts/serve.py +202 -0
- trilogy/scripts/serve_helpers/__init__.py +41 -0
- trilogy/scripts/serve_helpers/file_discovery.py +142 -0
- trilogy/scripts/serve_helpers/index_generation.py +206 -0
- trilogy/scripts/serve_helpers/models.py +38 -0
- trilogy/scripts/single_execution.py +131 -0
- trilogy/scripts/testing.py +143 -0
- trilogy/scripts/trilogy.py +75 -0
- trilogy/std/__init__.py +0 -0
- trilogy/std/color.preql +3 -0
- trilogy/std/date.preql +13 -0
- trilogy/std/display.preql +18 -0
- trilogy/std/geography.preql +22 -0
- trilogy/std/metric.preql +15 -0
- trilogy/std/money.preql +67 -0
- trilogy/std/net.preql +14 -0
- trilogy/std/ranking.preql +7 -0
- trilogy/std/report.preql +5 -0
- trilogy/std/semantic.preql +6 -0
- trilogy/utility.py +34 -0
trilogy/scripts/common.py
@@ -0,0 +1,432 @@
+"""Common helper functions used across all CLI commands."""
+
+import traceback
+from dataclasses import dataclass
+from io import StringIO
+from pathlib import Path as PathlibPath
+from typing import Any, Iterable, Sequence, Union
+
+from click.exceptions import Exit
+
+from trilogy import Executor
+from trilogy.constants import DEFAULT_NAMESPACE
+from trilogy.core.exceptions import ConfigurationException, ModelValidationError
+from trilogy.core.models.environment import Environment
+from trilogy.core.statements.execute import (
+    PROCESSED_STATEMENT_TYPES,
+    ProcessedQueryPersist,
+    ProcessedValidateStatement,
+)
+from trilogy.dialect.enums import Dialects
+from trilogy.execution.config import RuntimeConfig, load_config_file
+from trilogy.hooks.query_debugger import DebuggingHook
+from trilogy.scripts.dependency import ScriptNode
+from trilogy.scripts.display import (
+    print_error,
+    print_info,
+    print_success,
+)
+from trilogy.scripts.environment import extra_to_kwargs, parse_env_params
+
+# Configuration file name
+TRILOGY_CONFIG_NAME = "trilogy.toml"
+
+# Default stat types to display in output; easily configurable
+DEFAULT_STAT_TYPES: list[str] = ["persist", "update", "validate"]
+
+
+@dataclass
+class ExecutionStats:
+    """Statistics about statements executed in a script."""
+
+    persist_count: int = 0
+    update_count: int = 0
+    validate_count: int = 0
+
+    def __add__(self, other: "ExecutionStats") -> "ExecutionStats":
+        return ExecutionStats(
+            persist_count=self.persist_count + other.persist_count,
+            update_count=self.update_count + other.update_count,
+            validate_count=self.validate_count + other.validate_count,
+        )
+
+
+def format_stats(stats: ExecutionStats, stat_types: list[str] | None = None) -> str:
+    """Format execution stats for display."""
+    if stat_types is None:
+        stat_types = DEFAULT_STAT_TYPES
+
+    parts = []
+    if "persist" in stat_types and stats.persist_count > 0:
+        label = "table" if stats.persist_count == 1 else "tables"
+        parts.append(f"{stats.persist_count} {label} persisted")
+    if "update" in stat_types and stats.update_count > 0:
+        label = "datasource" if stats.update_count == 1 else "datasources"
+        parts.append(f"{stats.update_count} {label} updated")
+    if "validate" in stat_types and stats.validate_count > 0:
+        label = "datasource" if stats.validate_count == 1 else "datasources"
+        parts.append(f"{stats.validate_count} {label} validated")
+
+    return "; ".join(parts)
+
+
+@dataclass
+class CLIRuntimeParams:
+    """Parameters provided via CLI for execution."""
+
+    input: str
+    dialect: Dialects | None = None
+    parallelism: int | None = None
+    param: tuple[str, ...] = ()
+    conn_args: tuple[str, ...] = ()
+    debug: bool = False
+    config_path: PathlibPath | None = None
+    execution_strategy: str = "eager_bfs"
+    env: tuple[str, ...] = ()
+
+
+def merge_runtime_config(
+    cli_params: CLIRuntimeParams, file_config: RuntimeConfig
+) -> tuple[Dialects, int]:
+    """
+    Merge CLI parameters with config file settings.
+    CLI parameters take precedence over config file.
+
+    Returns:
+        tuple of (dialect, parallelism)
+
+    Raises:
+        Exit: If no dialect is specified in either CLI or config
+    """
+    # Resolve dialect: CLI argument takes precedence over config
+    if cli_params.dialect:
+        dialect = cli_params.dialect
+    elif file_config.engine_dialect:
+        dialect = file_config.engine_dialect
+    else:
+        print_error(
+            "No dialect specified. Provide dialect as argument or set engine.dialect in config file."
+        )
+        raise Exit(1)
+
+    # Resolve parallelism: CLI argument takes precedence over config
+    parallelism = (
+        cli_params.parallelism
+        if cli_params.parallelism is not None
+        else file_config.parallelism
+    )
+
+    return dialect, parallelism
+
+
+def find_trilogy_config(start_path: PathlibPath | None = None) -> PathlibPath | None:
+    """
+    Search for trilogy.toml starting from the given path, walking up parent directories.
+
+    Args:
+        start_path: Starting directory for search. If None, uses current working directory.
+
+    Returns:
+        Path to trilogy.toml if found, None otherwise.
+    """
+    search_path = start_path if start_path else PathlibPath.cwd()
+    if not search_path.is_dir():
+        search_path = search_path.parent
+
+    for parent in [search_path] + list(search_path.parents):
+        candidate = parent / TRILOGY_CONFIG_NAME
+        if candidate.exists():
+            return candidate
+    return None
+
+
+def resolve_input(path: PathlibPath) -> list[PathlibPath]:
+    # Directory
+    if path.is_dir():
+        pattern = "**/*.preql"
+        return sorted(path.glob(pattern))
+    # Single file
+    if path.exists() and path.is_file():
+        return [path]
+
+    raise FileNotFoundError(f"Input path '{path}' does not exist.")
+
+
+def get_runtime_config(
+    path: PathlibPath, config_override: PathlibPath | None = None
+) -> RuntimeConfig:
+    config_path: PathlibPath | None = None
+
+    if config_override:
+        config_path = config_override
+    else:
+        config_path = find_trilogy_config(path)
+
+    if not config_path:
+        return RuntimeConfig(startup_trilogy=[], startup_sql=[])
+
+    try:
+        return load_config_file(config_path)
+    except Exception as e:
+        print_error(f"Failed to load configuration file {config_path}: {e}")
+        handle_execution_exception(e)
+        # This won't be reached due to handle_execution_exception raising Exit
+        return RuntimeConfig(startup_trilogy=[], startup_sql=[])
+
+
+def resolve_input_information(
+    input: str, config_path_input: PathlibPath | None = None
+) -> tuple[Iterable[PathlibPath | StringIO], PathlibPath, str, str, RuntimeConfig]:
+    input_as_path = PathlibPath(input)
+    files: Iterable[StringIO | PathlibPath]
+    if input_as_path.exists():
+        pathlib_path = input_as_path
+        files = resolve_input(pathlib_path)
+
+        if pathlib_path.is_dir():
+            directory = pathlib_path
+            input_type = "directory"
+            config = get_runtime_config(pathlib_path, config_path_input)
+
+        else:
+            directory = pathlib_path.parent
+            input_type = "file"
+            config = get_runtime_config(pathlib_path, config_path_input)
+
+        input_name = pathlib_path.name
+    else:
+        script = input
+        files = [StringIO(script)]
+        directory = PathlibPath.cwd()
+        input_type = "query"
+        input_name = "inline"
+        config = RuntimeConfig(startup_trilogy=[], startup_sql=[])
+    return files, directory, input_type, input_name, config
+
+
+def validate_required_connection_params(
+    conn_dict: dict[str, Any],
+    required_keys: list[str],
+    optional_keys: list[str],
+    dialect_name: str,
+) -> dict:
+    missing = [key for key in required_keys if key not in conn_dict]
+    extra = [
+        key
+        for key in conn_dict
+        if key not in required_keys and key not in optional_keys
+    ]
+    if missing:
+        raise ConfigurationException(
+            f"Missing required {dialect_name} connection parameters: {', '.join(missing)}"
+        )
+    if extra:
+        print(
+            f"Warning: Extra {dialect_name} connection parameters provided: {', '.join(extra)}"
+        )
+    return {
+        k: v for k, v in conn_dict.items() if k in required_keys or k in optional_keys
+    }
+
+
+def get_dialect_config(
+    edialect: Dialects, conn_dict: dict[str, Any], runtime_config: RuntimeConfig
+) -> Any:
+    """Get dialect configuration based on dialect type."""
+    conf: Union[Any, None] = None
+
+    if edialect == Dialects.DUCK_DB:
+        from trilogy.dialect.config import DuckDBConfig
+
+        conn_dict = validate_required_connection_params(
+            conn_dict, [], ["path", "enable_python_datasources", "enable_gcs"], "DuckDB"
+        )
+        conf = DuckDBConfig(**conn_dict)
+    elif edialect == Dialects.SNOWFLAKE:
+        from trilogy.dialect.config import SnowflakeConfig
+
+        conn_dict = validate_required_connection_params(
+            conn_dict, ["username", "password", "account"], [], "Snowflake"
+        )
+        conf = SnowflakeConfig(**conn_dict)
+    elif edialect == Dialects.SQL_SERVER:
+        from trilogy.dialect.config import SQLServerConfig
+
+        conn_dict = validate_required_connection_params(
+            conn_dict,
+            ["host", "port", "username", "password", "database"],
+            [],
+            "SQL Server",
+        )
+        conf = SQLServerConfig(**conn_dict)
+    elif edialect == Dialects.POSTGRES:
+        from trilogy.dialect.config import PostgresConfig
+
+        conn_dict = validate_required_connection_params(
+            conn_dict,
+            ["host", "port", "username", "password", "database"],
+            [],
+            "Postgres",
+        )
+        conf = PostgresConfig(**conn_dict)
+    elif edialect == Dialects.BIGQUERY:
+        from trilogy.dialect.config import BigQueryConfig
+
+        conn_dict = validate_required_connection_params(
+            conn_dict, [], ["project"], "BigQuery"
+        )
+        conf = BigQueryConfig(**conn_dict)
+    elif edialect == Dialects.PRESTO:
+        from trilogy.dialect.config import PrestoConfig
+
+        conn_dict = validate_required_connection_params(
+            conn_dict,
+            ["host", "port", "username", "password", "catalog"],
+            [],
+            "Presto",
+        )
+        conf = PrestoConfig(**conn_dict)
+    if conf and runtime_config.engine_config:
+        conf = runtime_config.engine_config.merge_config(conf)
+    return conf
+
+
+def create_executor(
+    param: tuple[str, ...],
+    directory: PathlibPath,
+    conn_args: Iterable[str],
+    edialect: Dialects,
+    debug: bool,
+    config: RuntimeConfig,
+) -> Executor:
+    # Parse environment parameters from dedicated flag
+    namespace = DEFAULT_NAMESPACE
+    try:
+        env_params = parse_env_params(param)
+        from trilogy.scripts.display import show_environment_params
+
+        show_environment_params(env_params)
+    except ValueError as e:
+        print_error(str(e))
+        raise Exit(1) from e
+
+    # Parse connection arguments from remaining args
+    conn_dict = extra_to_kwargs(conn_args)
+
+    # Configure dialect
+    try:
+        conf = get_dialect_config(edialect, conn_dict, runtime_config=config)
+    except Exception as e:
+        handle_execution_exception(e)
+
+    # Create environment and set additional parameters if any exist
+    environment = Environment(working_path=str(directory), namespace=namespace)
+    if env_params:
+        environment.set_parameters(**env_params)
+
+    exec = Executor(
+        dialect=edialect,
+        engine=edialect.default_engine(conf=conf),
+        environment=environment,
+        hooks=[DebuggingHook()] if debug else [],
+        config=conf,
+    )
+    if config.startup_sql:
+        for script in config.startup_sql:
+            print_info(f"Executing startup SQL script: {script.name}...")
+            exec.execute_file(script)
+            print_success(f"Completed startup SQL script: {script.name}")
+    if config.startup_trilogy:
+        for script in config.startup_trilogy:
+            print_info(f"Executing startup Trilogy script: {script.name}...")
+            exec.execute_file(script)
+            print_success(f"Completed startup Trilogy script: {script.name}")
+    return exec
+
+
+def create_executor_for_script(
+    node: ScriptNode,
+    param: tuple[str, ...],
+    conn_args: Iterable[str],
+    edialect: Dialects,
+    debug: bool,
+    config: RuntimeConfig,
+) -> Executor:
+    """
+    Create an executor for a specific script node.
+
+    Each script gets its own executor with its own environment,
+    using the script's parent directory as the working path.
+    """
+    directory = node.path.parent
+    return create_executor(param, directory, conn_args, edialect, debug, config)
+
+
+def validate_datasources(
+    exec: Executor, mock: bool = False, quiet: bool = False
+) -> None:
+    """Validate datasources with consistent error handling.
+
+    Args:
+        exec: The executor instance
+        mock: If True, mock datasources before validation (for unit tests)
+        quiet: If True, suppress informational messages (for parallel execution)
+
+    Raises:
+        Exit: If validation fails
+    """
+    datasources = exec.environment.datasources.keys()
+    if not datasources:
+        if not quiet:
+            message = "unit" if mock else "integration"
+            print_success(f"No datasources found to {message} test.")
+        return
+
+    if mock:
+        exec.execute_text("mock datasources {};".format(", ".join(datasources)))
+
+    try:
+        exec.execute_text("validate datasources {};".format(", ".join(datasources)))
+    except ModelValidationError as e:
+        if not e.children:
+            print_error(f"Datasource validation failed: {e.message}")
+        for idx, child in enumerate(e.children or []):
+            print_error(f"Error {idx + 1}: {child.message}")
+        raise Exit(1) from e
+
+
+def handle_execution_exception(e: Exception, debug: bool = False) -> None:
+    print_error(f"Unexpected error: {e}")
+    if debug:
+        print_error(f"Full traceback:\n{traceback.format_exc()}")
+    raise Exit(1) from e
+
+
+def count_statement_stats(
+    statements: Sequence[PROCESSED_STATEMENT_TYPES],
+    existing_stats: ExecutionStats | None = None,
+) -> ExecutionStats:
+    """Count persist and validate statements in a list of processed statements."""
+    persist_count = sum(1 for s in statements if isinstance(s, ProcessedQueryPersist))
+    validate_count = sum(
+        1 for s in statements if isinstance(s, ProcessedValidateStatement)
+    )
+    if existing_stats:
+        existing_stats.persist_count += persist_count
+        existing_stats.validate_count += validate_count
+        return existing_stats
+    return ExecutionStats(persist_count=persist_count, validate_count=validate_count)
+
+
+def execute_script_with_stats(
+    exec: Executor, script_path: PathlibPath, run_statements: bool = True
+) -> ExecutionStats:
+    """Parse and optionally execute a script, returning execution stats."""
+    with open(script_path, "r") as f:
+        queries = exec.parse_text(f.read())
+    stats = ExecutionStats()
+    if run_statements:
+        for query in queries:
+            exec.execute_query(query)
+            stats = count_statement_stats([query], stats)
+    return stats
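
For reference, a minimal usage sketch (not part of the package) of the stats helpers introduced in this file, assuming the module is importable as `trilogy.scripts.common` per the file listing above:

```python
# Hypothetical example: combine per-script stats and format them for display.
# ExecutionStats, format_stats come from the diffed module shown above.
from trilogy.scripts.common import ExecutionStats, format_stats

# Stats from two scripts can be merged via ExecutionStats.__add__.
first = ExecutionStats(persist_count=2, validate_count=1)
second = ExecutionStats(persist_count=1, update_count=3)

combined = first + second
print(format_stats(combined))
# "3 tables persisted; 3 datasources updated; 1 datasource validated"
```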