pytrilogy 0.3.142__cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- LICENSE.md +19 -0
- _preql_import_resolver/__init__.py +5 -0
- _preql_import_resolver/_preql_import_resolver.cpython-313-x86_64-linux-gnu.so +0 -0
- pytrilogy-0.3.142.dist-info/METADATA +555 -0
- pytrilogy-0.3.142.dist-info/RECORD +200 -0
- pytrilogy-0.3.142.dist-info/WHEEL +5 -0
- pytrilogy-0.3.142.dist-info/entry_points.txt +2 -0
- pytrilogy-0.3.142.dist-info/licenses/LICENSE.md +19 -0
- trilogy/__init__.py +16 -0
- trilogy/ai/README.md +10 -0
- trilogy/ai/__init__.py +19 -0
- trilogy/ai/constants.py +92 -0
- trilogy/ai/conversation.py +107 -0
- trilogy/ai/enums.py +7 -0
- trilogy/ai/execute.py +50 -0
- trilogy/ai/models.py +34 -0
- trilogy/ai/prompts.py +100 -0
- trilogy/ai/providers/__init__.py +0 -0
- trilogy/ai/providers/anthropic.py +106 -0
- trilogy/ai/providers/base.py +24 -0
- trilogy/ai/providers/google.py +146 -0
- trilogy/ai/providers/openai.py +89 -0
- trilogy/ai/providers/utils.py +68 -0
- trilogy/authoring/README.md +3 -0
- trilogy/authoring/__init__.py +148 -0
- trilogy/constants.py +113 -0
- trilogy/core/README.md +52 -0
- trilogy/core/__init__.py +0 -0
- trilogy/core/constants.py +6 -0
- trilogy/core/enums.py +443 -0
- trilogy/core/env_processor.py +120 -0
- trilogy/core/environment_helpers.py +320 -0
- trilogy/core/ergonomics.py +193 -0
- trilogy/core/exceptions.py +123 -0
- trilogy/core/functions.py +1227 -0
- trilogy/core/graph_models.py +139 -0
- trilogy/core/internal.py +85 -0
- trilogy/core/models/__init__.py +0 -0
- trilogy/core/models/author.py +2669 -0
- trilogy/core/models/build.py +2521 -0
- trilogy/core/models/build_environment.py +180 -0
- trilogy/core/models/core.py +501 -0
- trilogy/core/models/datasource.py +322 -0
- trilogy/core/models/environment.py +751 -0
- trilogy/core/models/execute.py +1177 -0
- trilogy/core/optimization.py +251 -0
- trilogy/core/optimizations/__init__.py +12 -0
- trilogy/core/optimizations/base_optimization.py +17 -0
- trilogy/core/optimizations/hide_unused_concept.py +47 -0
- trilogy/core/optimizations/inline_datasource.py +102 -0
- trilogy/core/optimizations/predicate_pushdown.py +245 -0
- trilogy/core/processing/README.md +94 -0
- trilogy/core/processing/READMEv2.md +121 -0
- trilogy/core/processing/VIRTUAL_UNNEST.md +30 -0
- trilogy/core/processing/__init__.py +0 -0
- trilogy/core/processing/concept_strategies_v3.py +508 -0
- trilogy/core/processing/constants.py +15 -0
- trilogy/core/processing/discovery_node_factory.py +451 -0
- trilogy/core/processing/discovery_utility.py +548 -0
- trilogy/core/processing/discovery_validation.py +167 -0
- trilogy/core/processing/graph_utils.py +43 -0
- trilogy/core/processing/node_generators/README.md +9 -0
- trilogy/core/processing/node_generators/__init__.py +31 -0
- trilogy/core/processing/node_generators/basic_node.py +160 -0
- trilogy/core/processing/node_generators/common.py +268 -0
- trilogy/core/processing/node_generators/constant_node.py +38 -0
- trilogy/core/processing/node_generators/filter_node.py +315 -0
- trilogy/core/processing/node_generators/group_node.py +213 -0
- trilogy/core/processing/node_generators/group_to_node.py +117 -0
- trilogy/core/processing/node_generators/multiselect_node.py +205 -0
- trilogy/core/processing/node_generators/node_merge_node.py +653 -0
- trilogy/core/processing/node_generators/recursive_node.py +88 -0
- trilogy/core/processing/node_generators/rowset_node.py +165 -0
- trilogy/core/processing/node_generators/select_helpers/__init__.py +0 -0
- trilogy/core/processing/node_generators/select_helpers/datasource_injection.py +261 -0
- trilogy/core/processing/node_generators/select_merge_node.py +748 -0
- trilogy/core/processing/node_generators/select_node.py +95 -0
- trilogy/core/processing/node_generators/synonym_node.py +98 -0
- trilogy/core/processing/node_generators/union_node.py +91 -0
- trilogy/core/processing/node_generators/unnest_node.py +182 -0
- trilogy/core/processing/node_generators/window_node.py +201 -0
- trilogy/core/processing/nodes/README.md +28 -0
- trilogy/core/processing/nodes/__init__.py +179 -0
- trilogy/core/processing/nodes/base_node.py +519 -0
- trilogy/core/processing/nodes/filter_node.py +75 -0
- trilogy/core/processing/nodes/group_node.py +194 -0
- trilogy/core/processing/nodes/merge_node.py +420 -0
- trilogy/core/processing/nodes/recursive_node.py +46 -0
- trilogy/core/processing/nodes/select_node_v2.py +242 -0
- trilogy/core/processing/nodes/union_node.py +53 -0
- trilogy/core/processing/nodes/unnest_node.py +62 -0
- trilogy/core/processing/nodes/window_node.py +56 -0
- trilogy/core/processing/utility.py +823 -0
- trilogy/core/query_processor.py +596 -0
- trilogy/core/statements/README.md +35 -0
- trilogy/core/statements/__init__.py +0 -0
- trilogy/core/statements/author.py +536 -0
- trilogy/core/statements/build.py +0 -0
- trilogy/core/statements/common.py +20 -0
- trilogy/core/statements/execute.py +155 -0
- trilogy/core/table_processor.py +66 -0
- trilogy/core/utility.py +8 -0
- trilogy/core/validation/README.md +46 -0
- trilogy/core/validation/__init__.py +0 -0
- trilogy/core/validation/common.py +161 -0
- trilogy/core/validation/concept.py +146 -0
- trilogy/core/validation/datasource.py +227 -0
- trilogy/core/validation/environment.py +73 -0
- trilogy/core/validation/fix.py +256 -0
- trilogy/dialect/__init__.py +32 -0
- trilogy/dialect/base.py +1392 -0
- trilogy/dialect/bigquery.py +308 -0
- trilogy/dialect/common.py +147 -0
- trilogy/dialect/config.py +144 -0
- trilogy/dialect/dataframe.py +50 -0
- trilogy/dialect/duckdb.py +231 -0
- trilogy/dialect/enums.py +147 -0
- trilogy/dialect/metadata.py +173 -0
- trilogy/dialect/mock.py +190 -0
- trilogy/dialect/postgres.py +117 -0
- trilogy/dialect/presto.py +110 -0
- trilogy/dialect/results.py +89 -0
- trilogy/dialect/snowflake.py +129 -0
- trilogy/dialect/sql_server.py +137 -0
- trilogy/engine.py +48 -0
- trilogy/execution/config.py +75 -0
- trilogy/executor.py +568 -0
- trilogy/hooks/__init__.py +4 -0
- trilogy/hooks/base_hook.py +40 -0
- trilogy/hooks/graph_hook.py +139 -0
- trilogy/hooks/query_debugger.py +166 -0
- trilogy/metadata/__init__.py +0 -0
- trilogy/parser.py +10 -0
- trilogy/parsing/README.md +21 -0
- trilogy/parsing/__init__.py +0 -0
- trilogy/parsing/common.py +1069 -0
- trilogy/parsing/config.py +5 -0
- trilogy/parsing/exceptions.py +8 -0
- trilogy/parsing/helpers.py +1 -0
- trilogy/parsing/parse_engine.py +2813 -0
- trilogy/parsing/render.py +769 -0
- trilogy/parsing/trilogy.lark +540 -0
- trilogy/py.typed +0 -0
- trilogy/render.py +42 -0
- trilogy/scripts/README.md +9 -0
- trilogy/scripts/__init__.py +0 -0
- trilogy/scripts/agent.py +41 -0
- trilogy/scripts/agent_info.py +303 -0
- trilogy/scripts/common.py +355 -0
- trilogy/scripts/dependency/Cargo.lock +617 -0
- trilogy/scripts/dependency/Cargo.toml +39 -0
- trilogy/scripts/dependency/README.md +131 -0
- trilogy/scripts/dependency/build.sh +25 -0
- trilogy/scripts/dependency/src/directory_resolver.rs +177 -0
- trilogy/scripts/dependency/src/lib.rs +16 -0
- trilogy/scripts/dependency/src/main.rs +770 -0
- trilogy/scripts/dependency/src/parser.rs +435 -0
- trilogy/scripts/dependency/src/preql.pest +208 -0
- trilogy/scripts/dependency/src/python_bindings.rs +303 -0
- trilogy/scripts/dependency/src/resolver.rs +716 -0
- trilogy/scripts/dependency/tests/base.preql +3 -0
- trilogy/scripts/dependency/tests/cli_integration.rs +377 -0
- trilogy/scripts/dependency/tests/customer.preql +6 -0
- trilogy/scripts/dependency/tests/main.preql +9 -0
- trilogy/scripts/dependency/tests/orders.preql +7 -0
- trilogy/scripts/dependency/tests/test_data/base.preql +9 -0
- trilogy/scripts/dependency/tests/test_data/consumer.preql +1 -0
- trilogy/scripts/dependency.py +323 -0
- trilogy/scripts/display.py +512 -0
- trilogy/scripts/environment.py +46 -0
- trilogy/scripts/fmt.py +32 -0
- trilogy/scripts/ingest.py +471 -0
- trilogy/scripts/ingest_helpers/__init__.py +1 -0
- trilogy/scripts/ingest_helpers/foreign_keys.py +123 -0
- trilogy/scripts/ingest_helpers/formatting.py +93 -0
- trilogy/scripts/ingest_helpers/typing.py +161 -0
- trilogy/scripts/init.py +105 -0
- trilogy/scripts/parallel_execution.py +713 -0
- trilogy/scripts/plan.py +189 -0
- trilogy/scripts/run.py +63 -0
- trilogy/scripts/serve.py +140 -0
- trilogy/scripts/serve_helpers/__init__.py +41 -0
- trilogy/scripts/serve_helpers/file_discovery.py +142 -0
- trilogy/scripts/serve_helpers/index_generation.py +206 -0
- trilogy/scripts/serve_helpers/models.py +38 -0
- trilogy/scripts/single_execution.py +131 -0
- trilogy/scripts/testing.py +119 -0
- trilogy/scripts/trilogy.py +68 -0
- trilogy/std/__init__.py +0 -0
- trilogy/std/color.preql +3 -0
- trilogy/std/date.preql +13 -0
- trilogy/std/display.preql +18 -0
- trilogy/std/geography.preql +22 -0
- trilogy/std/metric.preql +15 -0
- trilogy/std/money.preql +67 -0
- trilogy/std/net.preql +14 -0
- trilogy/std/ranking.preql +7 -0
- trilogy/std/report.preql +5 -0
- trilogy/std/semantic.preql +6 -0
- trilogy/utility.py +34 -0
|
@@ -0,0 +1,206 @@
|
|
|
1
|
+
"""Index and model generation utilities for the serve command."""
|
|
2
|
+
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
|
|
5
|
+
from trilogy.execution.config import load_config_file
|
|
6
|
+
from trilogy.scripts.common import TRILOGY_CONFIG_NAME
|
|
7
|
+
from trilogy.scripts.serve_helpers.file_discovery import (
|
|
8
|
+
extract_description_from_file,
|
|
9
|
+
find_all_model_files,
|
|
10
|
+
find_csv_files,
|
|
11
|
+
find_trilogy_files,
|
|
12
|
+
get_relative_model_name,
|
|
13
|
+
get_safe_model_name,
|
|
14
|
+
read_file_content,
|
|
15
|
+
)
|
|
16
|
+
from trilogy.scripts.serve_helpers.models import (
|
|
17
|
+
ImportFile,
|
|
18
|
+
ModelImport,
|
|
19
|
+
StoreModelIndex,
|
|
20
|
+
)
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def _get_model_description(directory_path: Path, trilogy_files: list[Path]) -> str:
    """Derive a human-readable description for a model directory.

    Resolution order:
    1. The first non-blank line of README.md (hash marks stripped when the
       line is a markdown header).
    2. The description extracted from the first trilogy file.
    3. A generic fallback built from the directory name.

    Args:
        directory_path: Root directory of the model.
        trilogy_files: Trilogy files discovered in the directory.

    Returns:
        Description string for the model.
    """
    readme = directory_path / "README.md"
    if readme.exists():
        # Best-effort read: an unreadable README falls through to the next
        # description source instead of failing the whole lookup.
        try:
            with open(readme, "r", encoding="utf-8") as handle:
                text = handle.read().strip()
            if text:
                summary = ""
                for raw_line in text.split("\n"):
                    candidate = raw_line.strip()
                    if not candidate:
                        continue
                    if candidate.startswith("#"):
                        # Markdown header: keep its text, drop the hashes.
                        summary = candidate.lstrip("#").strip()
                    else:
                        summary = candidate
                    break
                if summary:
                    return summary
        except Exception:
            pass

    # Fall back to the first trilogy file's extracted description.
    if trilogy_files:
        return extract_description_from_file(trilogy_files[0])

    # Last resort: synthesize a description from the directory name.
    return f"Trilogy model: {directory_path.name}"
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
def generate_model_index(
    directory_path: Path, base_url: str, engine: str
) -> list[StoreModelIndex]:
    """Build the store index entry for a directory treated as one model.

    Args:
        directory_path: Root directory containing trilogy files.
        base_url: Base URL for the server (e.g., "http://localhost:8100").
        engine: Engine type (e.g., "duckdb", "generic"); accepted for
            signature parity with related helpers but not used here.

    Returns:
        A single-element list with the StoreModelIndex for the directory.
    """
    name = directory_path.name
    url = f"{base_url}/models/{get_safe_model_name(name)}.json"
    return [StoreModelIndex(name=name, url=url)]
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
def _resolve_setup_path(setup_file: Path, directory_path: Path) -> Path:
    """Resolve a configured setup-script path against the model directory."""
    return setup_file if setup_file.is_absolute() else directory_path / setup_file


def find_model_by_name(
    model_name: str, directory_path: Path, base_url: str, engine: str
) -> ModelImport | None:
    """Find and construct a ModelImport representing the directory as a single model.

    Args:
        model_name: The safe model name (directory name with slashes replaced
            by hyphens)
        directory_path: Root directory containing trilogy files
        base_url: Base URL for the server
        engine: Engine type (e.g., "duckdb", "generic")

    Returns:
        ModelImport object if the model_name matches the directory, None otherwise
    """
    expected_name = get_safe_model_name(directory_path.name)

    if model_name != expected_name:
        return None

    # Check for trilogy.toml config; a broken config is treated as "no setup
    # scripts" rather than a hard failure.
    config_path = directory_path / TRILOGY_CONFIG_NAME
    setup_scripts = []
    if config_path.exists():
        try:
            config = load_config_file(config_path)
            setup_scripts = config.startup_sql + config.startup_trilogy
        except Exception:
            pass

    # Resolve configured setup paths once, against the model directory.
    # BUG FIX: the source-file dedup below previously compared against the
    # *unresolved* config entries, so relative setup scripts were checked
    # against the process CWD and could be listed twice (setup + source).
    resolved_setup_paths = [
        _resolve_setup_path(s, directory_path) for s in setup_scripts
    ]

    # Find all trilogy files (preql and sql)
    trilogy_files = find_trilogy_files(directory_path)

    # Find CSV files separately
    csv_files = find_csv_files(directory_path)

    # Generate description
    description = _get_model_description(directory_path, trilogy_files)

    # Create components for each file
    components = []

    # Add setup scripts first with purpose="setup"
    for setup_path in resolved_setup_paths:
        if setup_path.exists():
            file_model_name = get_relative_model_name(setup_path, directory_path)
            safe_file_name = get_safe_model_name(file_model_name)
            file_ext = setup_path.suffix

            components.append(
                ImportFile(
                    url=f"{base_url}/files/{safe_file_name}{file_ext}",
                    name=file_model_name,
                    alias="",
                    type="sql" if file_ext == ".sql" else "trilogy",
                    purpose="setup",
                )
            )

    # Add all trilogy files (preql and sql) with purpose="source"
    for trilogy_file in trilogy_files:
        # Skip if already added as setup script
        if any(
            trilogy_file.samefile(s) if s.exists() else False
            for s in resolved_setup_paths
        ):
            continue

        file_model_name = get_relative_model_name(trilogy_file, directory_path)
        safe_file_name = get_safe_model_name(file_model_name)
        file_ext = trilogy_file.suffix

        components.append(
            ImportFile(
                url=f"{base_url}/files/{safe_file_name}{file_ext}",
                name=file_model_name,
                alias="",
                type="sql" if file_ext == ".sql" else "trilogy",
                purpose="source",
            )
        )

    # Add CSV files with purpose="data"
    for csv_file in csv_files:
        file_model_name = get_relative_model_name(csv_file, directory_path)
        safe_file_name = get_safe_model_name(file_model_name)

        components.append(
            ImportFile(
                url=f"{base_url}/files/{safe_file_name}.csv",
                name=file_model_name,
                alias=file_model_name,
                type="csv",
                purpose="data",
            )
        )

    return ModelImport(
        name=directory_path.name,
        description=description,
        engine=engine,
        components=components,
    )
|
|
195
|
+
|
|
196
|
+
|
|
197
|
+
def find_file_content_by_name(file_name: str, directory_path: Path) -> str | None:
    """Locate a model file by its safe name and return its content.

    The safe name encodes path separators as hyphens, so the lookup reverses
    that substitution and compares path components. NOTE(review): file or
    directory names that legitimately contain hyphens cannot round-trip
    through this encoding — confirm upstream names are hyphen-free.

    Args:
        file_name: Safe file name (path separators replaced with hyphens).
        directory_path: Root directory to search.

    Returns:
        The file's text content, or None when no file matches.
    """
    wanted = Path(file_name.replace("-", "/")).parts

    match = next(
        (
            candidate
            for candidate in find_all_model_files(directory_path)
            if candidate.relative_to(directory_path).parts == wanted
        ),
        None,
    )
    return read_file_content(match) if match is not None else None
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
"""Pydantic models for the serve command."""
|
|
2
|
+
|
|
3
|
+
from pydantic import BaseModel, Field
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class ImportFile(BaseModel):
    """Component file in a model import.

    Describes a single file that a served model is assembled from.
    """

    url: str  # URL where the file content can be fetched
    name: str  # relative model name of the file
    alias: str = ""  # optional alias; populated for CSV components
    purpose: str  # role of the file, e.g. "setup", "source", or "data"
    type: str | None = None  # file type hint, e.g. "trilogy", "sql", "csv"
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class ModelImport(BaseModel):
    """Model import definition.

    Full description of a directory-backed model: metadata plus the list
    of component files needed to load it.
    """

    name: str  # model (directory) name
    engine: str  # engine type, e.g. "duckdb" or "generic"
    description: str  # human-readable model description
    link: str = ""  # optional external link for the model
    tags: list[str] = Field(default_factory=list)  # optional classification tags
    components: list[ImportFile]  # files composing the model
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class StoreModelIndex(BaseModel):
    """Individual model entry in the store index."""

    name: str  # model name
    url: str  # URL of the model's JSON definition
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
class StoreIndex(BaseModel):
    """Store index containing list of available models."""

    name: str  # store name
    models: list[StoreModelIndex]  # available models in this store
|
|
@@ -0,0 +1,131 @@
|
|
|
1
|
+
import traceback
|
|
2
|
+
from datetime import datetime
|
|
3
|
+
from typing import Any, Union
|
|
4
|
+
|
|
5
|
+
from trilogy import Executor
|
|
6
|
+
from trilogy.core.statements.execute import PROCESSED_STATEMENT_TYPES
|
|
7
|
+
from trilogy.scripts.display import (
|
|
8
|
+
FETCH_LIMIT,
|
|
9
|
+
ResultSet,
|
|
10
|
+
create_progress_context,
|
|
11
|
+
print_error,
|
|
12
|
+
print_info,
|
|
13
|
+
print_results_table,
|
|
14
|
+
show_statement_result,
|
|
15
|
+
show_statement_type,
|
|
16
|
+
)
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def get_statement_type(statement: PROCESSED_STATEMENT_TYPES) -> str:
    """Return the class name of *statement*, used as its display type."""
    statement_class = type(statement)
    return statement_class.__name__
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def execute_single_statement(
    exec: Executor,
    query: PROCESSED_STATEMENT_TYPES,
    idx: int,
    total_queries: int,
    use_progress=False,
) -> tuple[bool, ResultSet | None, Any, Union[Exception, None]]:
    """Execute a single statement and handle results/errors consistently.

    Returns a (success, results, duration, error) tuple. On success the
    error slot is None; on failure the results slot is None. When
    use_progress is set, per-statement console output is suppressed so a
    progress bar can own the terminal.
    """
    # Announce the statement type up front unless a progress bar is active.
    if not use_progress:
        show_statement_type(idx, total_queries, get_statement_type(query))

    started = datetime.now()

    try:
        raw = exec.execute_statement(query)
        if raw:
            # Fetch one row beyond FETCH_LIMIT — presumably so downstream
            # display code can detect truncation; confirm against callers.
            result_set = ResultSet(
                rows=raw.fetchmany(FETCH_LIMIT + 1), columns=raw.keys()
            )
        else:
            result_set = None
        elapsed = datetime.now() - started

        if not use_progress:
            show_statement_result(idx, total_queries, elapsed, bool(result_set))

        return True, result_set, elapsed, None

    except Exception as error:
        elapsed = datetime.now() - started

        if not use_progress:
            show_statement_result(
                idx, total_queries, elapsed, False, str(error), type(error)
            )

        return False, None, elapsed, error
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
def execute_queries_with_progress(
    exec: Executor, queries: list[PROCESSED_STATEMENT_TYPES]
) -> Exception | None:
    """Execute queries with a Rich progress bar.

    Statements run in order and execution stops after the first failure.
    Per-statement output is buffered and printed only after the progress
    bar finishes so the two do not interleave.

    Args:
        exec: Executor to run statements against.
        queries: Processed statements to execute.

    Returns:
        The first exception raised, or None if every statement succeeded.
    """
    progress = create_progress_context()
    buffered = []
    first_error: Exception | None = None

    with progress:
        task = progress.add_task("Executing statements...", total=len(queries))

        for idx, query in enumerate(queries):
            statement_type = get_statement_type(query)
            progress.update(
                task, description=f"Statement {idx+1}/{len(queries)} ({statement_type})"
            )

            success, results, duration, error = execute_single_statement(
                exec, query, idx, len(queries), use_progress=True
            )

            if not success:
                first_error = error

            # Buffer results for printing after the progress bar is done.
            buffered.append((idx, len(queries), duration, success, results, error))
            progress.advance(task)
            if first_error:
                break

    # Print all buffered results now that the progress bar released the terminal.
    for idx, total_queries, duration, success, results, error in buffered:
        if error:
            show_statement_result(
                idx, total_queries, duration, False, str(error), type(error)
            )
            # BUG FIX: this previously used traceback.format_exc(), which is
            # only meaningful inside an active `except` block; called here it
            # printed "NoneType: None". Format the stored exception instead.
            formatted = "".join(
                traceback.format_exception(type(error), error, error.__traceback__)
            )
            print_error(f"Full traceback:\n{formatted}")
        else:
            show_statement_result(idx, total_queries, duration, bool(results))
        if results and not error:
            print_results_table(results)

    return first_error
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
def execute_queries_simple(
    exec: Executor, queries: list[PROCESSED_STATEMENT_TYPES]
) -> Exception | None:
    """Execute queries sequentially with plain console output.

    Unlike the progress-bar variant, execution continues after a failure;
    the most recent exception encountered is returned.

    Returns:
        The last exception raised, or None if every statement succeeded.
    """
    failure: Exception | None = None
    total = len(queries)

    for idx, query in enumerate(queries):
        if total > 1:
            print_info(f"Executing statement {idx+1} of {total}...")

        success, results, duration, error = execute_single_statement(
            exec, query, idx, total, use_progress=False
        )

        if not success:
            failure = error

        if results and not error:
            print_results_table(results)

    return failure
|
|
@@ -0,0 +1,119 @@
|
|
|
1
|
+
"""Testing commands (integration and unit) for Trilogy CLI."""
|
|
2
|
+
|
|
3
|
+
from pathlib import Path as PathlibPath
|
|
4
|
+
|
|
5
|
+
from click import UNPROCESSED, Path, argument, option, pass_context
|
|
6
|
+
from click.exceptions import Exit
|
|
7
|
+
|
|
8
|
+
from trilogy import Executor
|
|
9
|
+
from trilogy.dialect.enums import Dialects
|
|
10
|
+
from trilogy.scripts.common import (
|
|
11
|
+
CLIRuntimeParams,
|
|
12
|
+
handle_execution_exception,
|
|
13
|
+
validate_datasources,
|
|
14
|
+
)
|
|
15
|
+
from trilogy.scripts.dependency import ScriptNode
|
|
16
|
+
from trilogy.scripts.parallel_execution import run_parallel_execution
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def execute_script_for_integration(
    exec: Executor, node: ScriptNode, quiet: bool = False
) -> None:
    """Parse a script and validate its datasources for the 'integration' command.

    The script at node.path is parsed into the executor's environment, then
    datasources are validated with mock=False (real validation).
    """
    with open(node.path, "r") as source:
        script_text = source.read()
    exec.parse_text(script_text)
    validate_datasources(exec, mock=False, quiet=quiet)
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def execute_script_for_unit(
    exec: Executor, node: ScriptNode, quiet: bool = False
) -> None:
    """Parse a script and validate its datasources for the 'unit' command.

    Identical to the integration variant except validation runs with
    mock=True, so datasources are mocked rather than exercised for real.
    """
    with open(node.path, "r") as source:
        script_text = source.read()
    exec.parse_text(script_text)
    validate_datasources(exec, mock=True, quiet=quiet)
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
@argument("input", type=Path())
@argument("dialect", type=str, required=False)
@option("--param", multiple=True, help="Environment parameters as key=value pairs")
@option(
    "--parallelism",
    "-p",
    default=None,
    # BUG FIX: without an explicit type click inferred STRING (default is
    # None), so `-p 4` arrived as "4" despite the `int | None` annotation.
    type=int,
    help="Maximum parallel workers for directory execution",
)
@option(
    "--config", type=Path(exists=True), help="Path to trilogy.toml configuration file"
)
@argument("conn_args", nargs=-1, type=UNPROCESSED)
@pass_context
def integration(
    ctx, input, dialect: str | None, param, parallelism: int | None, config, conn_args
):
    """Run integration tests on Trilogy scripts."""
    # Assemble runtime parameters; integration always uses eager BFS ordering.
    cli_params = CLIRuntimeParams(
        input=input,
        dialect=Dialects(dialect) if dialect else None,
        parallelism=parallelism,
        param=param,
        conn_args=conn_args,
        debug=ctx.obj["DEBUG"],
        config_path=PathlibPath(config) if config else None,
        execution_strategy="eager_bfs",
    )

    try:
        run_parallel_execution(
            cli_params=cli_params,
            execution_fn=execute_script_for_integration,
            execution_mode="integration",
        )
    except Exit:
        # click's Exit carries the intended exit code; let it propagate.
        raise
    except Exception as e:
        handle_execution_exception(e, debug=cli_params.debug)
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
@argument("input", type=Path())
@option("--param", multiple=True, help="Environment parameters as key=value pairs")
@option(
    "--parallelism",
    "-p",
    default=None,
    # BUG FIX: without an explicit type click inferred STRING (default is
    # None), so `-p 4` arrived as "4" despite the `int | None` annotation.
    type=int,
    help="Maximum parallel workers for directory execution",
)
@option(
    "--config", type=Path(exists=True), help="Path to trilogy.toml configuration file"
)
@pass_context
def unit(
    ctx,
    input,
    param,
    parallelism: int | None,
    config,
):
    """Run unit tests on Trilogy scripts with mocked datasources."""
    # Build CLI runtime params (unit tests always use DuckDB)
    cli_params = CLIRuntimeParams(
        input=input,
        dialect=Dialects.DUCK_DB,
        parallelism=parallelism,
        param=param,
        conn_args=(),
        debug=ctx.obj["DEBUG"],
        config_path=PathlibPath(config) if config else None,
        execution_strategy="eager_bfs",
    )

    try:
        run_parallel_execution(
            cli_params=cli_params,
            execution_fn=execute_script_for_unit,
            execution_mode="unit",
        )
    except Exit:
        # click's Exit carries the intended exit code; let it propagate.
        raise
    except Exception as e:
        handle_execution_exception(e, debug=cli_params.debug)
|
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
from click import group, option, pass_context
|
|
2
|
+
|
|
3
|
+
from trilogy.scripts.agent import agent
|
|
4
|
+
from trilogy.scripts.agent_info import agent_info
|
|
5
|
+
from trilogy.scripts.display import set_rich_mode, show_debug_mode
|
|
6
|
+
from trilogy.scripts.fmt import fmt
|
|
7
|
+
from trilogy.scripts.ingest import ingest
|
|
8
|
+
from trilogy.scripts.init import init
|
|
9
|
+
from trilogy.scripts.plan import plan
|
|
10
|
+
from trilogy.scripts.run import run
|
|
11
|
+
from trilogy.scripts.serve import serve
|
|
12
|
+
from trilogy.scripts.testing import integration, unit
|
|
13
|
+
|
|
14
|
+
set_rich_mode = set_rich_mode
|
|
15
|
+
|
|
16
|
+
# Default parallelism level
|
|
17
|
+
DEFAULT_PARALLELISM = 2
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
AGENT_NOTICE = """
|
|
21
|
+
NOTE FOR AI AGENTS (Claude, GPT, etc.): For comprehensive usage documentation,
|
|
22
|
+
run 'trilogy agent-info' to get a detailed AGENTS.md-style guide with all
|
|
23
|
+
commands, options, and usage examples.
|
|
24
|
+
"""
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
@group(epilog=AGENT_NOTICE)
# NOTE(review): --debug has a bool default but no is_flag=True, so click
# treats it as an option that requires a value (e.g. `--debug true`) rather
# than a bare flag — confirm whether a flag was intended.
@option("--debug", default=False, help="Enable debug mode")
@pass_context
def cli(ctx, debug: bool):
    """Trilogy CLI - A beautiful data productivity tool."""
    # Root command group: stash the debug setting on the click context so
    # subcommands can read it via ctx.obj["DEBUG"].
    ctx.ensure_object(dict)
    ctx.obj["DEBUG"] = debug

    if debug:
        show_debug_mode()
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
# Add commands to the group.
# Commands that forward raw connection arguments to a backend are registered
# with ignore_unknown_options=True so click passes unrecognized tokens
# through to the command instead of rejecting them.
cli.command("init")(init)
cli.command("ingest", context_settings=dict(ignore_unknown_options=True))(ingest)
cli.command("fmt", context_settings=dict(ignore_unknown_options=True))(fmt)
cli.command(
    "unit",
    context_settings=dict(
        ignore_unknown_options=True,
    ),
)(unit)
cli.command(
    "integration",
    context_settings=dict(
        ignore_unknown_options=True,
    ),
)(integration)
cli.command(
    "run",
    context_settings=dict(
        ignore_unknown_options=True,
    ),
)(run)
cli.command("agent")(agent)
cli.command("agent-info")(agent_info)
cli.command("plan")(plan)
cli.command("serve")(serve)


if __name__ == "__main__":
    cli()
|
trilogy/std/__init__.py
ADDED
|
File without changes
|
trilogy/std/color.preql
ADDED
trilogy/std/date.preql
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
|
|
2
|
+
|
|
3
|
+
# Percentage helpers for the standard library.

type percent float; # Percentage value

# calc_percent(a, b, digits=-1): division-safe ratio a/b typed as percent.
# Returns 0.0 when b = 0 to avoid divide-by-zero; when digits is -1 the raw
# ratio is kept, otherwise the ratio is rounded to `digits` decimal places.
def calc_percent(a, b, digits=-1) ->
case
    when digits =-1 then
        case
            when b = 0 then 0.0::numeric
            else (a/b)::numeric
        end
    else round((
        case
            when b = 0 then 0.0::float
            else (a/b)::float
        end
    )::numeric, digits)
end::numeric::percent;
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
|
|
2
|
+
|
|
3
|
+
# Geography-related semantic type declarations for the standard library.

## us types
type us_state_short string; # US state abbreviation - ex MA, CA, NY
type us_state string; # US state name - ex Massachusetts, California, New York

type us_zip_code string; # US ZIP code

## generic types
type latitude float; # Latitude in degrees
type longitude float; # Longitude in degrees
type lat_long string; # Latitude and longitude in degrees, as a coordinate pair

type city string; # City name
type country string; # Full Country Name
type country_code string; # ISO-3166 Country code - ex US, CA, GB
type timezone string; # Timezone name
type region string; # Region name


## special formats
type geojson string; # GeoJSON format for geographic data
|
trilogy/std/metric.preql
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
# SI / metric unit semantic type declarations for the standard library.

# Length and distance units
type m numeric; # meters
type km numeric; # kilometers
type cm numeric; # centimeters
type mm numeric; # millimeters

# Mass units
type kg numeric; # kilograms
type g numeric; # grams
type tonne numeric; # metric tons (1000 kg)

# Force units
type n numeric; # newtons
type kn numeric; # kilonewtons (1000 N)
type mn numeric; # meganewtons (1,000,000 N)
|