pytrilogy 0.3.148__cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- LICENSE.md +19 -0
- _preql_import_resolver/__init__.py +5 -0
- _preql_import_resolver/_preql_import_resolver.cpython-312-aarch64-linux-gnu.so +0 -0
- pytrilogy-0.3.148.dist-info/METADATA +555 -0
- pytrilogy-0.3.148.dist-info/RECORD +206 -0
- pytrilogy-0.3.148.dist-info/WHEEL +5 -0
- pytrilogy-0.3.148.dist-info/entry_points.txt +2 -0
- pytrilogy-0.3.148.dist-info/licenses/LICENSE.md +19 -0
- trilogy/__init__.py +27 -0
- trilogy/ai/README.md +10 -0
- trilogy/ai/__init__.py +19 -0
- trilogy/ai/constants.py +92 -0
- trilogy/ai/conversation.py +107 -0
- trilogy/ai/enums.py +7 -0
- trilogy/ai/execute.py +50 -0
- trilogy/ai/models.py +34 -0
- trilogy/ai/prompts.py +100 -0
- trilogy/ai/providers/__init__.py +0 -0
- trilogy/ai/providers/anthropic.py +106 -0
- trilogy/ai/providers/base.py +24 -0
- trilogy/ai/providers/google.py +146 -0
- trilogy/ai/providers/openai.py +89 -0
- trilogy/ai/providers/utils.py +68 -0
- trilogy/authoring/README.md +3 -0
- trilogy/authoring/__init__.py +148 -0
- trilogy/constants.py +119 -0
- trilogy/core/README.md +52 -0
- trilogy/core/__init__.py +0 -0
- trilogy/core/constants.py +6 -0
- trilogy/core/enums.py +454 -0
- trilogy/core/env_processor.py +239 -0
- trilogy/core/environment_helpers.py +320 -0
- trilogy/core/ergonomics.py +193 -0
- trilogy/core/exceptions.py +123 -0
- trilogy/core/functions.py +1240 -0
- trilogy/core/graph_models.py +142 -0
- trilogy/core/internal.py +85 -0
- trilogy/core/models/__init__.py +0 -0
- trilogy/core/models/author.py +2662 -0
- trilogy/core/models/build.py +2603 -0
- trilogy/core/models/build_environment.py +165 -0
- trilogy/core/models/core.py +506 -0
- trilogy/core/models/datasource.py +434 -0
- trilogy/core/models/environment.py +756 -0
- trilogy/core/models/execute.py +1213 -0
- trilogy/core/optimization.py +251 -0
- trilogy/core/optimizations/__init__.py +12 -0
- trilogy/core/optimizations/base_optimization.py +17 -0
- trilogy/core/optimizations/hide_unused_concept.py +47 -0
- trilogy/core/optimizations/inline_datasource.py +102 -0
- trilogy/core/optimizations/predicate_pushdown.py +245 -0
- trilogy/core/processing/README.md +94 -0
- trilogy/core/processing/READMEv2.md +121 -0
- trilogy/core/processing/VIRTUAL_UNNEST.md +30 -0
- trilogy/core/processing/__init__.py +0 -0
- trilogy/core/processing/concept_strategies_v3.py +508 -0
- trilogy/core/processing/constants.py +15 -0
- trilogy/core/processing/discovery_node_factory.py +451 -0
- trilogy/core/processing/discovery_utility.py +548 -0
- trilogy/core/processing/discovery_validation.py +167 -0
- trilogy/core/processing/graph_utils.py +43 -0
- trilogy/core/processing/node_generators/README.md +9 -0
- trilogy/core/processing/node_generators/__init__.py +31 -0
- trilogy/core/processing/node_generators/basic_node.py +160 -0
- trilogy/core/processing/node_generators/common.py +270 -0
- trilogy/core/processing/node_generators/constant_node.py +38 -0
- trilogy/core/processing/node_generators/filter_node.py +315 -0
- trilogy/core/processing/node_generators/group_node.py +213 -0
- trilogy/core/processing/node_generators/group_to_node.py +117 -0
- trilogy/core/processing/node_generators/multiselect_node.py +207 -0
- trilogy/core/processing/node_generators/node_merge_node.py +695 -0
- trilogy/core/processing/node_generators/recursive_node.py +88 -0
- trilogy/core/processing/node_generators/rowset_node.py +165 -0
- trilogy/core/processing/node_generators/select_helpers/__init__.py +0 -0
- trilogy/core/processing/node_generators/select_helpers/datasource_injection.py +261 -0
- trilogy/core/processing/node_generators/select_merge_node.py +786 -0
- trilogy/core/processing/node_generators/select_node.py +95 -0
- trilogy/core/processing/node_generators/synonym_node.py +98 -0
- trilogy/core/processing/node_generators/union_node.py +91 -0
- trilogy/core/processing/node_generators/unnest_node.py +182 -0
- trilogy/core/processing/node_generators/window_node.py +201 -0
- trilogy/core/processing/nodes/README.md +28 -0
- trilogy/core/processing/nodes/__init__.py +179 -0
- trilogy/core/processing/nodes/base_node.py +522 -0
- trilogy/core/processing/nodes/filter_node.py +75 -0
- trilogy/core/processing/nodes/group_node.py +194 -0
- trilogy/core/processing/nodes/merge_node.py +420 -0
- trilogy/core/processing/nodes/recursive_node.py +46 -0
- trilogy/core/processing/nodes/select_node_v2.py +242 -0
- trilogy/core/processing/nodes/union_node.py +53 -0
- trilogy/core/processing/nodes/unnest_node.py +62 -0
- trilogy/core/processing/nodes/window_node.py +56 -0
- trilogy/core/processing/utility.py +823 -0
- trilogy/core/query_processor.py +604 -0
- trilogy/core/statements/README.md +35 -0
- trilogy/core/statements/__init__.py +0 -0
- trilogy/core/statements/author.py +536 -0
- trilogy/core/statements/build.py +0 -0
- trilogy/core/statements/common.py +20 -0
- trilogy/core/statements/execute.py +155 -0
- trilogy/core/table_processor.py +66 -0
- trilogy/core/utility.py +8 -0
- trilogy/core/validation/README.md +46 -0
- trilogy/core/validation/__init__.py +0 -0
- trilogy/core/validation/common.py +161 -0
- trilogy/core/validation/concept.py +146 -0
- trilogy/core/validation/datasource.py +227 -0
- trilogy/core/validation/environment.py +73 -0
- trilogy/core/validation/fix.py +256 -0
- trilogy/dialect/__init__.py +32 -0
- trilogy/dialect/base.py +1431 -0
- trilogy/dialect/bigquery.py +314 -0
- trilogy/dialect/common.py +147 -0
- trilogy/dialect/config.py +159 -0
- trilogy/dialect/dataframe.py +50 -0
- trilogy/dialect/duckdb.py +376 -0
- trilogy/dialect/enums.py +149 -0
- trilogy/dialect/metadata.py +173 -0
- trilogy/dialect/mock.py +190 -0
- trilogy/dialect/postgres.py +117 -0
- trilogy/dialect/presto.py +110 -0
- trilogy/dialect/results.py +89 -0
- trilogy/dialect/snowflake.py +129 -0
- trilogy/dialect/sql_server.py +137 -0
- trilogy/engine.py +48 -0
- trilogy/execution/__init__.py +17 -0
- trilogy/execution/config.py +119 -0
- trilogy/execution/state/__init__.py +0 -0
- trilogy/execution/state/file_state_store.py +0 -0
- trilogy/execution/state/sqllite_state_store.py +0 -0
- trilogy/execution/state/state_store.py +301 -0
- trilogy/executor.py +656 -0
- trilogy/hooks/__init__.py +4 -0
- trilogy/hooks/base_hook.py +40 -0
- trilogy/hooks/graph_hook.py +135 -0
- trilogy/hooks/query_debugger.py +166 -0
- trilogy/metadata/__init__.py +0 -0
- trilogy/parser.py +10 -0
- trilogy/parsing/README.md +21 -0
- trilogy/parsing/__init__.py +0 -0
- trilogy/parsing/common.py +1069 -0
- trilogy/parsing/config.py +5 -0
- trilogy/parsing/exceptions.py +8 -0
- trilogy/parsing/helpers.py +1 -0
- trilogy/parsing/parse_engine.py +2863 -0
- trilogy/parsing/render.py +773 -0
- trilogy/parsing/trilogy.lark +544 -0
- trilogy/py.typed +0 -0
- trilogy/render.py +45 -0
- trilogy/scripts/README.md +9 -0
- trilogy/scripts/__init__.py +0 -0
- trilogy/scripts/agent.py +41 -0
- trilogy/scripts/agent_info.py +306 -0
- trilogy/scripts/common.py +430 -0
- trilogy/scripts/dependency/Cargo.lock +617 -0
- trilogy/scripts/dependency/Cargo.toml +39 -0
- trilogy/scripts/dependency/README.md +131 -0
- trilogy/scripts/dependency/build.sh +25 -0
- trilogy/scripts/dependency/src/directory_resolver.rs +387 -0
- trilogy/scripts/dependency/src/lib.rs +16 -0
- trilogy/scripts/dependency/src/main.rs +770 -0
- trilogy/scripts/dependency/src/parser.rs +435 -0
- trilogy/scripts/dependency/src/preql.pest +208 -0
- trilogy/scripts/dependency/src/python_bindings.rs +311 -0
- trilogy/scripts/dependency/src/resolver.rs +716 -0
- trilogy/scripts/dependency/tests/base.preql +3 -0
- trilogy/scripts/dependency/tests/cli_integration.rs +377 -0
- trilogy/scripts/dependency/tests/customer.preql +6 -0
- trilogy/scripts/dependency/tests/main.preql +9 -0
- trilogy/scripts/dependency/tests/orders.preql +7 -0
- trilogy/scripts/dependency/tests/test_data/base.preql +9 -0
- trilogy/scripts/dependency/tests/test_data/consumer.preql +1 -0
- trilogy/scripts/dependency.py +323 -0
- trilogy/scripts/display.py +555 -0
- trilogy/scripts/environment.py +59 -0
- trilogy/scripts/fmt.py +32 -0
- trilogy/scripts/ingest.py +472 -0
- trilogy/scripts/ingest_helpers/__init__.py +1 -0
- trilogy/scripts/ingest_helpers/foreign_keys.py +123 -0
- trilogy/scripts/ingest_helpers/formatting.py +93 -0
- trilogy/scripts/ingest_helpers/typing.py +161 -0
- trilogy/scripts/init.py +105 -0
- trilogy/scripts/parallel_execution.py +748 -0
- trilogy/scripts/plan.py +189 -0
- trilogy/scripts/refresh.py +106 -0
- trilogy/scripts/run.py +79 -0
- trilogy/scripts/serve.py +202 -0
- trilogy/scripts/serve_helpers/__init__.py +41 -0
- trilogy/scripts/serve_helpers/file_discovery.py +142 -0
- trilogy/scripts/serve_helpers/index_generation.py +206 -0
- trilogy/scripts/serve_helpers/models.py +38 -0
- trilogy/scripts/single_execution.py +131 -0
- trilogy/scripts/testing.py +129 -0
- trilogy/scripts/trilogy.py +75 -0
- trilogy/std/__init__.py +0 -0
- trilogy/std/color.preql +3 -0
- trilogy/std/date.preql +13 -0
- trilogy/std/display.preql +18 -0
- trilogy/std/geography.preql +22 -0
- trilogy/std/metric.preql +15 -0
- trilogy/std/money.preql +67 -0
- trilogy/std/net.preql +14 -0
- trilogy/std/ranking.preql +7 -0
- trilogy/std/report.preql +5 -0
- trilogy/std/semantic.preql +6 -0
- trilogy/utility.py +34 -0
--- /dev/null
+++ trilogy/scripts/serve_helpers/file_discovery.py
@@ -0,0 +1,142 @@
+"""File discovery and processing utilities for the serve command."""
+
+from pathlib import Path
+
+
+def find_preql_files(directory_path: Path) -> list[Path]:
+    """Find all .preql files in the directory recursively.
+
+    Args:
+        directory_path: The root directory to search
+
+    Returns:
+        List of Path objects for all .preql files found
+    """
+    return list(directory_path.rglob("*.preql"))
+
+
+def find_sql_files(directory_path: Path) -> list[Path]:
+    """Find all .sql files in the directory recursively.
+
+    Args:
+        directory_path: The root directory to search
+
+    Returns:
+        List of Path objects for all .sql files found
+    """
+    return list(directory_path.rglob("*.sql"))
+
+
+def find_csv_files(directory_path: Path) -> list[Path]:
+    """Find all .csv files in the directory recursively.
+
+    Args:
+        directory_path: The root directory to search
+
+    Returns:
+        List of Path objects for all .csv files found
+    """
+    return list(directory_path.rglob("*.csv"))
+
+
+def find_trilogy_files(directory_path: Path) -> list[Path]:
+    """Find all .preql and .sql files in the directory recursively.
+
+    Args:
+        directory_path: The root directory to search
+
+    Returns:
+        List of Path objects for all .preql and .sql files found, sorted by path
+    """
+    preql_files = find_preql_files(directory_path)
+    sql_files = find_sql_files(directory_path)
+    return sorted(preql_files + sql_files)
+
+
+def find_all_model_files(directory_path: Path) -> list[Path]:
+    """Find all model files (.preql, .sql, .csv) in the directory recursively.
+
+    Args:
+        directory_path: The root directory to search
+
+    Returns:
+        List of Path objects for all model files found, sorted by path
+    """
+    preql_files = find_preql_files(directory_path)
+    sql_files = find_sql_files(directory_path)
+    csv_files = find_csv_files(directory_path)
+    return sorted(preql_files + sql_files + csv_files)
+
+
+def get_relative_model_name(preql_file: Path, directory_path: Path) -> str:
+    """Get the relative model name from a model file path.
+
+    Args:
+        preql_file: Path to the .preql, .sql, or .csv file
+        directory_path: Root directory path
+
+    Returns:
+        Relative model name with forward slashes and no extension
+    """
+    relative_path = preql_file.relative_to(directory_path)
+    return (
+        str(relative_path)
+        .replace("\\", "/")
+        .replace(".preql", "")
+        .replace(".sql", "")
+        .replace(".csv", "")
+    )
+
+
+def get_safe_model_name(model_name: str) -> str:
+    """Convert a model name to a URL-safe format.
+
+    Args:
+        model_name: The model name (may contain slashes)
+
+    Returns:
+        URL-safe model name with slashes replaced by hyphens
+    """
+    return model_name.replace("/", "-")
+
+
+def extract_description_from_file(file_path: Path) -> str:
+    """Extract description from a preql or sql file's comments.
+
+    Looks for the first comment line (starting with # or --) in the first 5 lines
+    of the file and uses it as the description.
+
+    Args:
+        file_path: Path to the .preql or .sql file
+
+    Returns:
+        Description extracted from comments or a default description
+    """
+    with open(file_path, "r") as f:
+        content = f.read()
+
+    model_name = file_path.stem
+    default_description = f"Trilogy model: {model_name}"
+
+    first_lines = content.split("\n")[:5]
+    for line in first_lines:
+        stripped = line.strip()
+        if stripped.startswith("#"):
+            return stripped.lstrip("#").strip()
+        if stripped.startswith("--"):
+            return stripped.lstrip("-").strip()
+
+    return default_description
+
+
+def read_file_content(file_path: Path) -> str:
+    """Read and return the content of a file.
+
+    Args:
+        file_path: Path to the file
+
+    Returns:
+        File content as string
+    """
+    with open(file_path, "r") as f:
+        return f.read()
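
The helpers above form a small pipeline: discovery yields paths, get_relative_model_name turns a path into a model name, and get_safe_model_name makes that name URL-safe. A minimal sketch of the flow, assuming a hypothetical models/ directory (the layout and printed URLs are illustrative):

    from pathlib import Path

    from trilogy.scripts.serve_helpers.file_discovery import (
        find_trilogy_files,
        get_relative_model_name,
        get_safe_model_name,
    )

    # Hypothetical layout: models/sales/orders.preql, models/sales/setup.sql
    root = Path("models")
    for source in find_trilogy_files(root):
        name = get_relative_model_name(source, root)  # e.g. "sales/orders"
        safe = get_safe_model_name(name)  # e.g. "sales-orders"
        print(f"/files/{safe}{source.suffix}")  # URL shape used by the serve helpers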
--- /dev/null
+++ trilogy/scripts/serve_helpers/index_generation.py
@@ -0,0 +1,206 @@
+"""Index and model generation utilities for the serve command."""
+
+from pathlib import Path
+
+from trilogy.execution.config import load_config_file
+from trilogy.scripts.common import TRILOGY_CONFIG_NAME
+from trilogy.scripts.serve_helpers.file_discovery import (
+    extract_description_from_file,
+    find_all_model_files,
+    find_csv_files,
+    find_trilogy_files,
+    get_relative_model_name,
+    get_safe_model_name,
+    read_file_content,
+)
+from trilogy.scripts.serve_helpers.models import (
+    ImportFile,
+    ModelImport,
+    StoreModelIndex,
+)
+
+
+def _get_model_description(directory_path: Path, trilogy_files: list[Path]) -> str:
+    """Get model description from README.md, first file, or default.
+
+    Priority order:
+    1. README.md file in the directory
+    2. First comment from first trilogy file (alphabetically)
+    3. Default description based on directory name
+
+    Args:
+        directory_path: Root directory of the model
+        trilogy_files: List of trilogy files in the directory
+
+    Returns:
+        Description string for the model
+    """
+    # Check for README.md first
+    readme_path = directory_path / "README.md"
+    if readme_path.exists():
+        try:
+            with open(readme_path, "r", encoding="utf-8") as f:
+                content = f.read().strip()
+            # Return first non-empty line or first paragraph
+            if content:
+                # Get first line or first paragraph (up to first blank line)
+                lines = content.split("\n")
+                first_line = ""
+                for line in lines:
+                    stripped = line.strip()
+                    # Skip markdown headers
+                    if stripped and not stripped.startswith("#"):
+                        first_line = stripped
+                        break
+                    # If it's a header, use it without the hash marks
+                    elif stripped.startswith("#"):
+                        first_line = stripped.lstrip("#").strip()
+                        break
+                if first_line:
+                    return first_line
+        except Exception:
+            pass
+
+    # Fall back to first file's description
+    if trilogy_files:
+        return extract_description_from_file(trilogy_files[0])
+
+    # Default description
+    return f"Trilogy model: {directory_path.name}"
+
+
+def generate_model_index(
+    directory_path: Path, base_url: str, engine: str
+) -> list[StoreModelIndex]:
+    """Generate model index representing directory as a single model.
+
+    Args:
+        directory_path: Root directory containing trilogy files
+        base_url: Base URL for the server (e.g., "http://localhost:8100")
+        engine: Engine type (e.g., "duckdb", "generic")
+
+    Returns:
+        List with a single StoreModelIndex for the directory model
+    """
+    model_name = directory_path.name
+    safe_name = get_safe_model_name(model_name)
+
+    return [StoreModelIndex(name=model_name, url=f"{base_url}/models/{safe_name}.json")]
+
+
+def find_model_by_name(
+    model_name: str, directory_path: Path, base_url: str, engine: str
+) -> ModelImport | None:
+    """Find and construct a ModelImport representing the directory as a single model.
+
+    Args:
+        model_name: The safe model name (directory name with slashes replaced by hyphens)
+        directory_path: Root directory containing trilogy files
+        base_url: Base URL for the server
+        engine: Engine type (e.g., "duckdb", "generic")
+
+    Returns:
+        ModelImport object if the model_name matches the directory, None otherwise
+    """
+    expected_name = get_safe_model_name(directory_path.name)
+
+    if model_name != expected_name:
+        return None
+
+    # Check for trilogy.toml config
+    config_path = directory_path / TRILOGY_CONFIG_NAME
+    setup_scripts = []
+    if config_path.exists():
+        try:
+            config = load_config_file(config_path)
+            setup_scripts = config.startup_sql + config.startup_trilogy
+        except Exception:
+            pass
+
+    # Find all trilogy files (preql and sql)
+    trilogy_files = find_trilogy_files(directory_path)
+
+    # Find CSV files separately
+    csv_files = find_csv_files(directory_path)
+
+    # Generate description
+    description = _get_model_description(directory_path, trilogy_files)
+
+    # Create components for each file
+    components = []
+
+    # Add setup scripts first with purpose="setup"
+    for setup_file in setup_scripts:
+        setup_path = (
+            setup_file if setup_file.is_absolute() else directory_path / setup_file
+        )
+        if setup_path.exists():
+            file_model_name = get_relative_model_name(setup_path, directory_path)
+            safe_file_name = get_safe_model_name(file_model_name)
+            file_ext = setup_path.suffix
+
+            components.append(
+                ImportFile(
+                    url=f"{base_url}/files/{safe_file_name}{file_ext}",
+                    name=file_model_name,
+                    alias="",
+                    type="sql" if file_ext == ".sql" else "trilogy",
+                    purpose="setup",
+                )
+            )
+
+    # Add all trilogy files (preql and sql) with purpose="source"
+    for trilogy_file in trilogy_files:
+        # Skip if already added as setup script
+        if any(
+            trilogy_file.samefile(s) if s.exists() else False for s in setup_scripts
+        ):
+            continue
+
+        file_model_name = get_relative_model_name(trilogy_file, directory_path)
+        safe_file_name = get_safe_model_name(file_model_name)
+        file_ext = trilogy_file.suffix
+
+        components.append(
+            ImportFile(
+                url=f"{base_url}/files/{safe_file_name}{file_ext}",
+                name=file_model_name,
+                alias="",
+                type="sql" if file_ext == ".sql" else "trilogy",
+                purpose="source",
+            )
+        )
+
+    # Add CSV files with purpose="data"
+    for csv_file in csv_files:
+        file_model_name = get_relative_model_name(csv_file, directory_path)
+        safe_file_name = get_safe_model_name(file_model_name)
+
+        components.append(
+            ImportFile(
+                url=f"{base_url}/files/{safe_file_name}.csv",
+                name=file_model_name,
+                alias=file_model_name,
+                type="csv",
+                purpose="data",
+            )
+        )
+
+    return ModelImport(
+        name=directory_path.name,
+        description=description,
+        engine=engine,
+        components=components,
+    )
+
+
+def find_file_content_by_name(file_name: str, directory_path: Path) -> str | None:
+    """Return the content of the model file whose URL-safe name matches, if any."""
+    target_parts = Path(file_name.replace("-", "/")).parts
+
+    for model_file in find_all_model_files(directory_path):
+        relative_parts = model_file.relative_to(directory_path).parts
+        if relative_parts == target_parts:
+            return read_file_content(model_file)
+
+    return None
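
Together these functions back three kinds of serve responses: the store index, a per-model manifest, and raw file content. A sketch of a caller, assuming a hypothetical models/ directory (the base URL follows the docstring example):

    from pathlib import Path

    from trilogy.scripts.serve_helpers.index_generation import (
        find_file_content_by_name,
        find_model_by_name,
        generate_model_index,
    )

    root = Path("models")  # hypothetical model directory
    base_url = "http://localhost:8100"  # example value from the docstrings

    index = generate_model_index(root, base_url, engine="duckdb")
    print([entry.url for entry in index])  # one entry: {base_url}/models/models.json

    model = find_model_by_name("models", root, base_url, engine="duckdb")
    if model is not None:
        for component in model.components:
            print(component.purpose, component.url)

    # Resolve a /files/<safe-name> request back to on-disk content.
    content = find_file_content_by_name("sales-orders.preql", root)

One design note: get_safe_model_name maps "/" to "-" and find_file_content_by_name inverts that by mapping every "-" back to "/", so file or directory names containing a literal hyphen cannot round-trip through this scheme.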
--- /dev/null
+++ trilogy/scripts/serve_helpers/models.py
@@ -0,0 +1,38 @@
+"""Pydantic models for the serve command."""
+
+from pydantic import BaseModel, Field
+
+
+class ImportFile(BaseModel):
+    """Component file in a model import."""
+
+    url: str
+    name: str
+    alias: str = ""
+    purpose: str
+    type: str | None = None
+
+
+class ModelImport(BaseModel):
+    """Model import definition."""
+
+    name: str
+    engine: str
+    description: str
+    link: str = ""
+    tags: list[str] = Field(default_factory=list)
+    components: list[ImportFile]
+
+
+class StoreModelIndex(BaseModel):
+    """Individual model entry in the store index."""
+
+    name: str
+    url: str
+
+
+class StoreIndex(BaseModel):
+    """Store index containing list of available models."""
+
+    name: str
+    models: list[StoreModelIndex]
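
These models define the JSON contract between the serve endpoints and their consumers. A small construction-and-serialization sketch, assuming pydantic v2 (model_dump_json); all field values are illustrative:

    from trilogy.scripts.serve_helpers.models import ImportFile, ModelImport

    model = ModelImport(
        name="sales",
        engine="duckdb",
        description="Example sales model",
        components=[
            ImportFile(
                url="http://localhost:8100/files/orders.preql",
                name="orders",
                purpose="source",
                type="trilogy",
            )
        ],
    )
    print(model.model_dump_json(indent=2))  # omitted fields fall back to "" / [] / None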
--- /dev/null
+++ trilogy/scripts/single_execution.py
@@ -0,0 +1,131 @@
+import traceback
+from datetime import datetime
+from typing import Any, Union
+
+from trilogy import Executor
+from trilogy.core.statements.execute import PROCESSED_STATEMENT_TYPES
+from trilogy.scripts.display import (
+    FETCH_LIMIT,
+    ResultSet,
+    create_progress_context,
+    print_error,
+    print_info,
+    print_results_table,
+    show_statement_result,
+    show_statement_type,
+)
+
+
+def get_statement_type(statement: PROCESSED_STATEMENT_TYPES) -> str:
+    """Get the type/class name of a statement."""
+    return type(statement).__name__
+
+
+def execute_single_statement(
+    exec: Executor,
+    query: PROCESSED_STATEMENT_TYPES,
+    idx: int,
+    total_queries: int,
+    use_progress=False,
+) -> tuple[bool, ResultSet | None, Any, Union[Exception, None]]:
+    """Execute a single statement and handle results/errors consistently."""
+    # Log the statement type before execution
+    statement_type = get_statement_type(query)
+    if not use_progress:  # Only show type when not using progress bar
+        show_statement_type(idx, total_queries, statement_type)
+
+    start_time = datetime.now()
+
+    try:
+        raw_results = exec.execute_statement(query)
+        results = (
+            ResultSet(
+                rows=raw_results.fetchmany(FETCH_LIMIT + 1), columns=raw_results.keys()
+            )
+            if raw_results
+            else None
+        )
+        duration = datetime.now() - start_time
+
+        if not use_progress:
+            show_statement_result(idx, total_queries, duration, bool(results))
+
+        return True, results, duration, None
+
+    except Exception as e:
+        duration = datetime.now() - start_time
+
+        if not use_progress:
+            show_statement_result(idx, total_queries, duration, False, str(e), type(e))
+
+        return False, None, duration, e
+
+
+def execute_queries_with_progress(
+    exec: Executor, queries: list[PROCESSED_STATEMENT_TYPES]
+) -> Exception | None:
+    """Execute queries with a Rich progress bar, stopping at the first failure. Returns that exception, or None if all succeeded."""
+    progress = create_progress_context()
+    results_to_print = []
+    exception = None
+
+    with progress:
+        task = progress.add_task("Executing statements...", total=len(queries))
+
+        for idx, query in enumerate(queries):
+            statement_type = get_statement_type(query)
+            progress.update(
+                task, description=f"Statement {idx+1}/{len(queries)} ({statement_type})"
+            )
+
+            success, results, duration, error = execute_single_statement(
+                exec, query, idx, len(queries), use_progress=True
+            )
+
+            if not success:
+                exception = error
+
+            # Store results for printing after progress is done
+            results_to_print.append(
+                (idx, len(queries), duration, success, results, error)
+            )
+            progress.advance(task)
+            if exception:
+                break
+
+    # Print all results after progress bar is finished
+    for idx, total_queries, duration, success, results, error in results_to_print:
+        if error:
+            show_statement_result(
+                idx, total_queries, duration, False, str(error), type(error)
+            )
+            print_error("Full traceback:\n" + "".join(traceback.format_exception(error)))
+        else:
+            show_statement_result(idx, total_queries, duration, bool(results))
+        if results and not error:
+            print_results_table(results)
+
+    return exception
+
+
+def execute_queries_simple(
+    exec: Executor, queries: list[PROCESSED_STATEMENT_TYPES]
+) -> Exception | None:
+    """Execute queries with simple output. Returns the last exception encountered, or None if all succeeded."""
+    exception = None
+
+    for idx, query in enumerate(queries):
+        if len(queries) > 1:
+            print_info(f"Executing statement {idx+1} of {len(queries)}...")
+
+        success, results, duration, error = execute_single_statement(
+            exec, query, idx, len(queries), use_progress=False
+        )
+
+        if not success:
+            exception = error
+
+        if results and not error:
+            print_results_table(results)
+
+    return exception
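
Both runners share one contract: execute statements in order, print any result tables, and hand back an exception instead of raising mid-stream. A sketch of a caller under two assumptions, that Dialects.DUCK_DB.default_executor() is available as a convenience constructor and that parse_text output feeds these helpers directly; the query text is illustrative Trilogy:

    from trilogy.dialect.enums import Dialects
    from trilogy.scripts.single_execution import execute_queries_simple

    executor = Dialects.DUCK_DB.default_executor()  # assumed convenience constructor
    statements = executor.parse_text("const answer <- 42; select answer;")
    failure = execute_queries_simple(executor, statements)
    if failure is not None:
        raise failure  # surface the deferred exception to the caller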
--- /dev/null
+++ trilogy/scripts/testing.py
@@ -0,0 +1,129 @@
+"""Testing commands (integration and unit) for Trilogy CLI."""
+
+from pathlib import Path as PathlibPath
+
+from click import UNPROCESSED, Path, argument, option, pass_context
+from click.exceptions import Exit
+
+from trilogy import Executor
+from trilogy.dialect.enums import Dialects
+from trilogy.scripts.common import (
+    CLIRuntimeParams,
+    ExecutionStats,
+    count_statement_stats,
+    handle_execution_exception,
+    validate_datasources,
+)
+from trilogy.scripts.dependency import ScriptNode
+from trilogy.scripts.parallel_execution import ExecutionMode, run_parallel_execution
+
+
+def execute_script_for_integration(
+    exec: Executor, node: ScriptNode, quiet: bool = False
+) -> ExecutionStats:
+    """Execute a script for the 'integration' command (parse + validate)."""
+    with open(node.path, "r") as f:
+        queries = exec.parse_text(f.read())
+    stats = count_statement_stats(queries)
+    validate_datasources(exec, mock=False, quiet=quiet)
+    # Count datasources validated
+    stats.validate_count = len(exec.environment.datasources)
+    return stats
+
+
+def execute_script_for_unit(
+    exec: Executor, node: ScriptNode, quiet: bool = False
+) -> ExecutionStats:
+    """Execute a script for the 'unit' command (parse + mock validate)."""
+    with open(node.path, "r") as f:
+        queries = exec.parse_text(f.read())
+    stats = count_statement_stats(queries)
+    validate_datasources(exec, mock=True, quiet=quiet)
+    # Count datasources validated
+    stats.validate_count = len(exec.environment.datasources)
+    return stats
+
+
+@argument("input", type=Path())
+@argument("dialect", type=str, required=False)
+@option("--param", multiple=True, help="Environment parameters as key=value pairs")
+@option(
+    "--parallelism",
+    "-p",
+    default=None,
+    help="Maximum parallel workers for directory execution",
+)
+@option(
+    "--config", type=Path(exists=True), help="Path to trilogy.toml configuration file"
+)
+@argument("conn_args", nargs=-1, type=UNPROCESSED)
+@pass_context
+def integration(
+    ctx, input, dialect: str | None, param, parallelism: int | None, config, conn_args
+):
+    """Run integration tests on Trilogy scripts."""
+    cli_params = CLIRuntimeParams(
+        input=input,
+        dialect=Dialects(dialect) if dialect else None,
+        parallelism=parallelism,
+        param=param,
+        conn_args=conn_args,
+        debug=ctx.obj["DEBUG"],
+        config_path=PathlibPath(config) if config else None,
+        execution_strategy="eager_bfs",
+    )
+
+    try:
+        run_parallel_execution(
+            cli_params=cli_params,
+            execution_fn=execute_script_for_integration,
+            execution_mode=ExecutionMode.INTEGRATION,
+        )
+    except Exit:
+        raise
+    except Exception as e:
+        handle_execution_exception(e, debug=cli_params.debug)
+
+
+@argument("input", type=Path())
+@option("--param", multiple=True, help="Environment parameters as key=value pairs")
+@option(
+    "--parallelism",
+    "-p",
+    default=None,
+    help="Maximum parallel workers for directory execution",
+)
+@option(
+    "--config", type=Path(exists=True), help="Path to trilogy.toml configuration file"
+)
+@pass_context
+def unit(
+    ctx,
+    input,
+    param,
+    parallelism: int | None,
+    config,
+):
+    """Run unit tests on Trilogy scripts with mocked datasources."""
+    # Build CLI runtime params (unit tests always use DuckDB)
+    cli_params = CLIRuntimeParams(
+        input=input,
+        dialect=Dialects.DUCK_DB,
+        parallelism=parallelism,
+        param=param,
+        conn_args=(),
+        debug=ctx.obj["DEBUG"],
+        config_path=PathlibPath(config) if config else None,
+        execution_strategy="eager_bfs",
+    )
+
+    try:
+        run_parallel_execution(
+            cli_params=cli_params,
+            execution_fn=execute_script_for_unit,
+            execution_mode=ExecutionMode.UNIT,
+        )
+    except Exit:
+        raise
+    except Exception as e:
+        handle_execution_exception(e, debug=cli_params.debug)
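
Note that integration and unit carry click parameter decorators but no @command decorator, so they must be registered by the CLI entry module (presumably trilogy/scripts/trilogy.py, which also supplies the DEBUG key read from ctx.obj). A hypothetical wiring sketch:

    from click import group

    from trilogy.scripts.testing import integration, unit

    @group()
    def cli():
        """Root CLI group (sketch); real registration lives in trilogy/scripts/trilogy.py."""

    cli.command("integration")(integration)
    cli.command("unit")(unit)

    if __name__ == "__main__":
        cli(obj={"DEBUG": False})  # ctx.obj must carry DEBUG for the subcommands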