pytrilogy 0.3.149__cp313-cp313-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (207) hide show
  1. LICENSE.md +19 -0
  2. _preql_import_resolver/__init__.py +5 -0
  3. _preql_import_resolver/_preql_import_resolver.cp313-win_amd64.pyd +0 -0
  4. pytrilogy-0.3.149.dist-info/METADATA +555 -0
  5. pytrilogy-0.3.149.dist-info/RECORD +207 -0
  6. pytrilogy-0.3.149.dist-info/WHEEL +4 -0
  7. pytrilogy-0.3.149.dist-info/entry_points.txt +2 -0
  8. pytrilogy-0.3.149.dist-info/licenses/LICENSE.md +19 -0
  9. trilogy/__init__.py +27 -0
  10. trilogy/ai/README.md +10 -0
  11. trilogy/ai/__init__.py +19 -0
  12. trilogy/ai/constants.py +92 -0
  13. trilogy/ai/conversation.py +107 -0
  14. trilogy/ai/enums.py +7 -0
  15. trilogy/ai/execute.py +50 -0
  16. trilogy/ai/models.py +34 -0
  17. trilogy/ai/prompts.py +100 -0
  18. trilogy/ai/providers/__init__.py +0 -0
  19. trilogy/ai/providers/anthropic.py +106 -0
  20. trilogy/ai/providers/base.py +24 -0
  21. trilogy/ai/providers/google.py +146 -0
  22. trilogy/ai/providers/openai.py +89 -0
  23. trilogy/ai/providers/utils.py +68 -0
  24. trilogy/authoring/README.md +3 -0
  25. trilogy/authoring/__init__.py +148 -0
  26. trilogy/constants.py +119 -0
  27. trilogy/core/README.md +52 -0
  28. trilogy/core/__init__.py +0 -0
  29. trilogy/core/constants.py +6 -0
  30. trilogy/core/enums.py +454 -0
  31. trilogy/core/env_processor.py +239 -0
  32. trilogy/core/environment_helpers.py +320 -0
  33. trilogy/core/ergonomics.py +193 -0
  34. trilogy/core/exceptions.py +123 -0
  35. trilogy/core/functions.py +1240 -0
  36. trilogy/core/graph_models.py +142 -0
  37. trilogy/core/internal.py +85 -0
  38. trilogy/core/models/__init__.py +0 -0
  39. trilogy/core/models/author.py +2670 -0
  40. trilogy/core/models/build.py +2603 -0
  41. trilogy/core/models/build_environment.py +165 -0
  42. trilogy/core/models/core.py +506 -0
  43. trilogy/core/models/datasource.py +436 -0
  44. trilogy/core/models/environment.py +756 -0
  45. trilogy/core/models/execute.py +1213 -0
  46. trilogy/core/optimization.py +251 -0
  47. trilogy/core/optimizations/__init__.py +12 -0
  48. trilogy/core/optimizations/base_optimization.py +17 -0
  49. trilogy/core/optimizations/hide_unused_concept.py +47 -0
  50. trilogy/core/optimizations/inline_datasource.py +102 -0
  51. trilogy/core/optimizations/predicate_pushdown.py +245 -0
  52. trilogy/core/processing/README.md +94 -0
  53. trilogy/core/processing/READMEv2.md +121 -0
  54. trilogy/core/processing/VIRTUAL_UNNEST.md +30 -0
  55. trilogy/core/processing/__init__.py +0 -0
  56. trilogy/core/processing/concept_strategies_v3.py +508 -0
  57. trilogy/core/processing/constants.py +15 -0
  58. trilogy/core/processing/discovery_node_factory.py +451 -0
  59. trilogy/core/processing/discovery_utility.py +548 -0
  60. trilogy/core/processing/discovery_validation.py +167 -0
  61. trilogy/core/processing/graph_utils.py +43 -0
  62. trilogy/core/processing/node_generators/README.md +9 -0
  63. trilogy/core/processing/node_generators/__init__.py +31 -0
  64. trilogy/core/processing/node_generators/basic_node.py +160 -0
  65. trilogy/core/processing/node_generators/common.py +270 -0
  66. trilogy/core/processing/node_generators/constant_node.py +38 -0
  67. trilogy/core/processing/node_generators/filter_node.py +315 -0
  68. trilogy/core/processing/node_generators/group_node.py +213 -0
  69. trilogy/core/processing/node_generators/group_to_node.py +117 -0
  70. trilogy/core/processing/node_generators/multiselect_node.py +207 -0
  71. trilogy/core/processing/node_generators/node_merge_node.py +695 -0
  72. trilogy/core/processing/node_generators/recursive_node.py +88 -0
  73. trilogy/core/processing/node_generators/rowset_node.py +165 -0
  74. trilogy/core/processing/node_generators/select_helpers/__init__.py +0 -0
  75. trilogy/core/processing/node_generators/select_helpers/datasource_injection.py +261 -0
  76. trilogy/core/processing/node_generators/select_merge_node.py +846 -0
  77. trilogy/core/processing/node_generators/select_node.py +95 -0
  78. trilogy/core/processing/node_generators/synonym_node.py +98 -0
  79. trilogy/core/processing/node_generators/union_node.py +91 -0
  80. trilogy/core/processing/node_generators/unnest_node.py +182 -0
  81. trilogy/core/processing/node_generators/window_node.py +201 -0
  82. trilogy/core/processing/nodes/README.md +28 -0
  83. trilogy/core/processing/nodes/__init__.py +179 -0
  84. trilogy/core/processing/nodes/base_node.py +522 -0
  85. trilogy/core/processing/nodes/filter_node.py +75 -0
  86. trilogy/core/processing/nodes/group_node.py +194 -0
  87. trilogy/core/processing/nodes/merge_node.py +420 -0
  88. trilogy/core/processing/nodes/recursive_node.py +46 -0
  89. trilogy/core/processing/nodes/select_node_v2.py +242 -0
  90. trilogy/core/processing/nodes/union_node.py +53 -0
  91. trilogy/core/processing/nodes/unnest_node.py +62 -0
  92. trilogy/core/processing/nodes/window_node.py +56 -0
  93. trilogy/core/processing/utility.py +823 -0
  94. trilogy/core/query_processor.py +604 -0
  95. trilogy/core/statements/README.md +35 -0
  96. trilogy/core/statements/__init__.py +0 -0
  97. trilogy/core/statements/author.py +536 -0
  98. trilogy/core/statements/build.py +0 -0
  99. trilogy/core/statements/common.py +20 -0
  100. trilogy/core/statements/execute.py +155 -0
  101. trilogy/core/table_processor.py +66 -0
  102. trilogy/core/utility.py +8 -0
  103. trilogy/core/validation/README.md +46 -0
  104. trilogy/core/validation/__init__.py +0 -0
  105. trilogy/core/validation/common.py +161 -0
  106. trilogy/core/validation/concept.py +146 -0
  107. trilogy/core/validation/datasource.py +227 -0
  108. trilogy/core/validation/environment.py +73 -0
  109. trilogy/core/validation/fix.py +256 -0
  110. trilogy/dialect/__init__.py +32 -0
  111. trilogy/dialect/base.py +1432 -0
  112. trilogy/dialect/bigquery.py +314 -0
  113. trilogy/dialect/common.py +147 -0
  114. trilogy/dialect/config.py +159 -0
  115. trilogy/dialect/dataframe.py +50 -0
  116. trilogy/dialect/duckdb.py +397 -0
  117. trilogy/dialect/enums.py +151 -0
  118. trilogy/dialect/metadata.py +173 -0
  119. trilogy/dialect/mock.py +190 -0
  120. trilogy/dialect/postgres.py +117 -0
  121. trilogy/dialect/presto.py +110 -0
  122. trilogy/dialect/results.py +89 -0
  123. trilogy/dialect/snowflake.py +129 -0
  124. trilogy/dialect/sql_server.py +137 -0
  125. trilogy/engine.py +48 -0
  126. trilogy/execution/__init__.py +17 -0
  127. trilogy/execution/config.py +119 -0
  128. trilogy/execution/state/__init__.py +0 -0
  129. trilogy/execution/state/exceptions.py +26 -0
  130. trilogy/execution/state/file_state_store.py +0 -0
  131. trilogy/execution/state/sqllite_state_store.py +0 -0
  132. trilogy/execution/state/state_store.py +406 -0
  133. trilogy/executor.py +692 -0
  134. trilogy/hooks/__init__.py +4 -0
  135. trilogy/hooks/base_hook.py +40 -0
  136. trilogy/hooks/graph_hook.py +135 -0
  137. trilogy/hooks/query_debugger.py +166 -0
  138. trilogy/metadata/__init__.py +0 -0
  139. trilogy/parser.py +10 -0
  140. trilogy/parsing/README.md +21 -0
  141. trilogy/parsing/__init__.py +0 -0
  142. trilogy/parsing/common.py +1069 -0
  143. trilogy/parsing/config.py +5 -0
  144. trilogy/parsing/exceptions.py +8 -0
  145. trilogy/parsing/helpers.py +1 -0
  146. trilogy/parsing/parse_engine.py +2876 -0
  147. trilogy/parsing/render.py +775 -0
  148. trilogy/parsing/trilogy.lark +546 -0
  149. trilogy/py.typed +0 -0
  150. trilogy/render.py +45 -0
  151. trilogy/scripts/README.md +9 -0
  152. trilogy/scripts/__init__.py +0 -0
  153. trilogy/scripts/agent.py +41 -0
  154. trilogy/scripts/agent_info.py +306 -0
  155. trilogy/scripts/common.py +432 -0
  156. trilogy/scripts/dependency/Cargo.lock +617 -0
  157. trilogy/scripts/dependency/Cargo.toml +39 -0
  158. trilogy/scripts/dependency/README.md +131 -0
  159. trilogy/scripts/dependency/build.sh +25 -0
  160. trilogy/scripts/dependency/src/directory_resolver.rs +387 -0
  161. trilogy/scripts/dependency/src/lib.rs +16 -0
  162. trilogy/scripts/dependency/src/main.rs +770 -0
  163. trilogy/scripts/dependency/src/parser.rs +435 -0
  164. trilogy/scripts/dependency/src/preql.pest +208 -0
  165. trilogy/scripts/dependency/src/python_bindings.rs +311 -0
  166. trilogy/scripts/dependency/src/resolver.rs +716 -0
  167. trilogy/scripts/dependency/tests/base.preql +3 -0
  168. trilogy/scripts/dependency/tests/cli_integration.rs +377 -0
  169. trilogy/scripts/dependency/tests/customer.preql +6 -0
  170. trilogy/scripts/dependency/tests/main.preql +9 -0
  171. trilogy/scripts/dependency/tests/orders.preql +7 -0
  172. trilogy/scripts/dependency/tests/test_data/base.preql +9 -0
  173. trilogy/scripts/dependency/tests/test_data/consumer.preql +1 -0
  174. trilogy/scripts/dependency.py +323 -0
  175. trilogy/scripts/display.py +555 -0
  176. trilogy/scripts/environment.py +59 -0
  177. trilogy/scripts/fmt.py +32 -0
  178. trilogy/scripts/ingest.py +487 -0
  179. trilogy/scripts/ingest_helpers/__init__.py +1 -0
  180. trilogy/scripts/ingest_helpers/foreign_keys.py +123 -0
  181. trilogy/scripts/ingest_helpers/formatting.py +93 -0
  182. trilogy/scripts/ingest_helpers/typing.py +161 -0
  183. trilogy/scripts/init.py +105 -0
  184. trilogy/scripts/parallel_execution.py +762 -0
  185. trilogy/scripts/plan.py +189 -0
  186. trilogy/scripts/refresh.py +161 -0
  187. trilogy/scripts/run.py +79 -0
  188. trilogy/scripts/serve.py +202 -0
  189. trilogy/scripts/serve_helpers/__init__.py +41 -0
  190. trilogy/scripts/serve_helpers/file_discovery.py +142 -0
  191. trilogy/scripts/serve_helpers/index_generation.py +206 -0
  192. trilogy/scripts/serve_helpers/models.py +38 -0
  193. trilogy/scripts/single_execution.py +131 -0
  194. trilogy/scripts/testing.py +143 -0
  195. trilogy/scripts/trilogy.py +75 -0
  196. trilogy/std/__init__.py +0 -0
  197. trilogy/std/color.preql +3 -0
  198. trilogy/std/date.preql +13 -0
  199. trilogy/std/display.preql +18 -0
  200. trilogy/std/geography.preql +22 -0
  201. trilogy/std/metric.preql +15 -0
  202. trilogy/std/money.preql +67 -0
  203. trilogy/std/net.preql +14 -0
  204. trilogy/std/ranking.preql +7 -0
  205. trilogy/std/report.preql +5 -0
  206. trilogy/std/semantic.preql +6 -0
  207. trilogy/utility.py +34 -0
@@ -0,0 +1,142 @@
1
+ """File discovery and processing utilities for the serve command."""
2
+
3
+ from pathlib import Path
4
+
5
+
6
def find_preql_files(directory_path: Path) -> list[Path]:
    """Recursively collect every ``.preql`` file under a directory.

    Args:
        directory_path: The root directory to search

    Returns:
        List of Path objects for all .preql files found
    """
    pattern = "*.preql"
    return list(directory_path.rglob(pattern))
16
+
17
+
18
def find_sql_files(directory_path: Path) -> list[Path]:
    """Recursively collect every ``.sql`` file under a directory.

    Args:
        directory_path: The root directory to search

    Returns:
        List of Path objects for all .sql files found
    """
    found = directory_path.rglob("*.sql")
    return list(found)
28
+
29
+
30
def find_csv_files(directory_path: Path) -> list[Path]:
    """Recursively collect every ``.csv`` file under a directory.

    Args:
        directory_path: The root directory to search

    Returns:
        List of Path objects for all .csv files found
    """
    return [*directory_path.rglob("*.csv")]
40
+
41
+
42
def find_trilogy_files(directory_path: Path) -> list[Path]:
    """Find all .preql and .sql files in the directory recursively.

    Args:
        directory_path: The root directory to search

    Returns:
        List of Path objects for all .preql and .sql files found, sorted by path
    """
    combined = [*find_preql_files(directory_path), *find_sql_files(directory_path)]
    combined.sort()
    return combined
54
+
55
+
56
def find_all_model_files(directory_path: Path) -> list[Path]:
    """Find all model files (.preql, .sql, .csv) in the directory recursively.

    Args:
        directory_path: The root directory to search

    Returns:
        List of Path objects for all model files found, sorted by path
    """
    collected: list[Path] = []
    for finder in (find_preql_files, find_sql_files, find_csv_files):
        collected.extend(finder(directory_path))
    return sorted(collected)
69
+
70
+
71
def get_relative_model_name(preql_file: Path, directory_path: Path) -> str:
    """Get the relative model name from a model file path.

    Args:
        preql_file: Path to the .preql, .sql, or .csv file
        directory_path: Root directory path

    Returns:
        Relative model name with forward slashes and no extension
    """
    relative_path = preql_file.relative_to(directory_path)
    # Strip only the final extension rather than str.replace()-ing the
    # extension text anywhere in the string, which corrupted names such as
    # "my.sql.backup.csv" or directory components containing ".sql".
    if relative_path.suffix in (".preql", ".sql", ".csv"):
        relative_path = relative_path.with_suffix("")
    # as_posix() normalizes Windows backslashes to forward slashes.
    return relative_path.as_posix()
89
+
90
+
91
def get_safe_model_name(model_name: str) -> str:
    """Translate a model name into a URL-safe token.

    Slash-separated model names (derived from file paths) are flattened by
    joining the segments with hyphens so the result can be used as a single
    URL path component.

    Args:
        model_name: The model name (may contain slashes)

    Returns:
        URL-safe model name with slashes replaced by hyphens
    """
    return "-".join(model_name.split("/"))
101
+
102
+
103
def extract_description_from_file(file_path: Path) -> str:
    """Extract description from a preql or sql file's comments.

    Looks for the first comment line (starting with # or --) in the first 5 lines
    of the file and uses it as the description.

    Args:
        file_path: Path to the .preql or .sql file

    Returns:
        Description extracted from comments or a default description
    """
    # Read as UTF-8 explicitly so the result doesn't depend on the
    # platform's default locale encoding (consistent with how README.md is
    # read elsewhere in this package).
    with open(file_path, "r", encoding="utf-8") as f:
        content = f.read()

    model_name = file_path.stem
    default_description = f"Trilogy model: {model_name}"

    first_lines = content.split("\n")[:5]
    for line in first_lines:
        stripped = line.strip()
        if stripped.startswith("#"):
            return stripped.lstrip("#").strip()
        if stripped.startswith("--"):
            return stripped.lstrip("-").strip()

    return default_description
130
+
131
+
132
def read_file_content(file_path: Path) -> str:
    """Read and return the content of a file as UTF-8 text.

    Args:
        file_path: Path to the file

    Returns:
        File content as string

    Raises:
        UnicodeDecodeError: If the file is not valid UTF-8.
    """
    # Explicit encoding: the content is served over HTTP, so it must not
    # depend on the host platform's default locale encoding.
    with open(file_path, "r", encoding="utf-8") as f:
        return f.read()
@@ -0,0 +1,206 @@
1
+ """Index and model generation utilities for the serve command."""
2
+
3
+ from pathlib import Path
4
+
5
+ from trilogy.execution.config import load_config_file
6
+ from trilogy.scripts.common import TRILOGY_CONFIG_NAME
7
+ from trilogy.scripts.serve_helpers.file_discovery import (
8
+ extract_description_from_file,
9
+ find_all_model_files,
10
+ find_csv_files,
11
+ find_trilogy_files,
12
+ get_relative_model_name,
13
+ get_safe_model_name,
14
+ read_file_content,
15
+ )
16
+ from trilogy.scripts.serve_helpers.models import (
17
+ ImportFile,
18
+ ModelImport,
19
+ StoreModelIndex,
20
+ )
21
+
22
+
23
+ def _get_model_description(directory_path: Path, trilogy_files: list[Path]) -> str:
24
+ """Get model description from README.md, first file, or default.
25
+
26
+ Priority order:
27
+ 1. README.md file in the directory
28
+ 2. First comment from first trilogy file (alphabetically)
29
+ 3. Default description based on directory name
30
+
31
+ Args:
32
+ directory_path: Root directory of the model
33
+ trilogy_files: List of trilogy files in the directory
34
+
35
+ Returns:
36
+ Description string for the model
37
+ """
38
+ # Check for README.md first
39
+ readme_path = directory_path / "README.md"
40
+ if readme_path.exists():
41
+ try:
42
+ with open(readme_path, "r", encoding="utf-8") as f:
43
+ content = f.read().strip()
44
+ # Return first non-empty line or first paragraph
45
+ if content:
46
+ # Get first line or first paragraph (up to first blank line)
47
+ lines = content.split("\n")
48
+ first_line = ""
49
+ for line in lines:
50
+ stripped = line.strip()
51
+ # Skip markdown headers
52
+ if stripped and not stripped.startswith("#"):
53
+ first_line = stripped
54
+ break
55
+ # If it's a header, use it without the hash marks
56
+ elif stripped.startswith("#"):
57
+ first_line = stripped.lstrip("#").strip()
58
+ break
59
+ if first_line:
60
+ return first_line
61
+ except Exception:
62
+ pass
63
+
64
+ # Fall back to first file's description
65
+ if trilogy_files:
66
+ return extract_description_from_file(trilogy_files[0])
67
+
68
+ # Default description
69
+ return f"Trilogy model: {directory_path.name}"
70
+
71
+
72
def generate_model_index(
    directory_path: Path, base_url: str, engine: str
) -> list[StoreModelIndex]:
    """Generate model index representing directory as a single model.

    Args:
        directory_path: Root directory containing trilogy files
        base_url: Base URL for the server (e.g., "http://localhost:8100")
        engine: Engine type (e.g., "duckdb", "generic")

    Returns:
        List with a single StoreModelIndex for the directory model
    """
    # One directory == one model; the URL-safe variant of its name is used
    # in the model document's URL.
    safe = get_safe_model_name(directory_path.name)
    entry = StoreModelIndex(
        name=directory_path.name,
        url=f"{base_url}/models/{safe}.json",
    )
    return [entry]
89
+
90
+
91
def find_model_by_name(
    model_name: str, directory_path: Path, base_url: str, engine: str
) -> ModelImport | None:
    """Find and construct a ModelImport representing the directory as a single model.

    Args:
        model_name: The safe model name (directory name with slashes replaced by hyphens)
        directory_path: Root directory containing trilogy files
        base_url: Base URL for the server
        engine: Engine type (e.g., "duckdb", "generic")

    Returns:
        ModelImport object if the model_name matches the directory, None otherwise
    """
    expected_name = get_safe_model_name(directory_path.name)

    if model_name != expected_name:
        return None

    # Check for trilogy.toml config; loading is best-effort.
    config_path = directory_path / TRILOGY_CONFIG_NAME
    setup_scripts = []
    if config_path.exists():
        try:
            config = load_config_file(config_path)
            setup_scripts = config.startup_sql + config.startup_trilogy
        except Exception:
            pass

    # Anchor relative setup paths to the model directory ONCE, and reuse the
    # same resolved paths for both the existence check and the later dedupe
    # test. (Bug fix: the dedupe previously compared against the raw config
    # paths, which resolve against the process CWD when relative, so setup
    # files could be emitted a second time as sources.)
    resolved_setups = [
        s if s.is_absolute() else directory_path / s for s in setup_scripts
    ]
    existing_setups = [p for p in resolved_setups if p.exists()]

    # Find all trilogy files (preql and sql)
    trilogy_files = find_trilogy_files(directory_path)

    # Find CSV files separately
    csv_files = find_csv_files(directory_path)

    # Generate description
    description = _get_model_description(directory_path, trilogy_files)

    # Create components for each file
    components = []

    # Add setup scripts first with purpose="setup"
    for setup_path in existing_setups:
        file_model_name = get_relative_model_name(setup_path, directory_path)
        safe_file_name = get_safe_model_name(file_model_name)
        file_ext = setup_path.suffix

        components.append(
            ImportFile(
                url=f"{base_url}/files/{safe_file_name}{file_ext}",
                name=file_model_name,
                alias="",
                type="sql" if file_ext == ".sql" else "trilogy",
                purpose="setup",
            )
        )

    # Add all trilogy files (preql and sql) with purpose="source"
    for trilogy_file in trilogy_files:
        # Skip if already added as setup script; existing_setups only holds
        # paths that exist, so samefile cannot raise here.
        if any(trilogy_file.samefile(p) for p in existing_setups):
            continue

        file_model_name = get_relative_model_name(trilogy_file, directory_path)
        safe_file_name = get_safe_model_name(file_model_name)
        file_ext = trilogy_file.suffix

        components.append(
            ImportFile(
                url=f"{base_url}/files/{safe_file_name}{file_ext}",
                name=file_model_name,
                alias="",
                type="sql" if file_ext == ".sql" else "trilogy",
                purpose="source",
            )
        )

    # Add CSV files with purpose="data"; unlike sources they get an alias so
    # the data can be referenced by name.
    for csv_file in csv_files:
        file_model_name = get_relative_model_name(csv_file, directory_path)
        safe_file_name = get_safe_model_name(file_model_name)

        components.append(
            ImportFile(
                url=f"{base_url}/files/{safe_file_name}.csv",
                name=file_model_name,
                alias=file_model_name,
                type="csv",
                purpose="data",
            )
        )

    return ModelImport(
        name=directory_path.name,
        description=description,
        engine=engine,
        components=components,
    )
195
+
196
+
197
def find_file_content_by_name(file_name: str, directory_path: Path) -> str | None:
    """Return the content of the model file whose URL-safe name matches.

    ``file_name`` is produced by replacing "/" with "-" in a relative path
    (see get_safe_model_name) and keeping the extension. That mapping is
    lossy, so instead of trying to reverse it (the previous approach, which
    broke for file or directory names that themselves contain hyphens), we
    re-derive the safe name of each candidate file and compare. When the
    mapping is ambiguous, the first match in sorted order wins.

    Args:
        file_name: URL-safe file name, including extension
        directory_path: Root directory containing the model files

    Returns:
        File content as a string, or None if no file matches
    """
    for model_file in find_all_model_files(directory_path):
        relative = model_file.relative_to(directory_path)
        if relative.as_posix().replace("/", "-") == file_name:
            return read_file_content(model_file)

    return None
@@ -0,0 +1,38 @@
1
+ """Pydantic models for the serve command."""
2
+
3
+ from pydantic import BaseModel, Field
4
+
5
+
6
class ImportFile(BaseModel):
    """Component file in a model import.

    Serialized into the model JSON payload; each entry points at a file
    exposed under the serve command's /files/ endpoint.
    """

    # URL where the file's content can be fetched.
    url: str
    # Relative model name (path-style, extension stripped).
    name: str
    # Import alias; left empty for setup/source files, set to the model
    # name for CSV data files.
    alias: str = ""
    # Role of the file in the model: "setup", "source", or "data".
    purpose: str
    # File kind: "trilogy", "sql", or "csv"; None when unspecified.
    type: str | None = None
14
+
15
+
16
class ModelImport(BaseModel):
    """Model import definition.

    Built by the serve helpers to describe a directory-backed model and all
    of its component files.
    """

    # Model name (the source directory's name).
    name: str
    # Engine type, e.g. "duckdb" or "generic".
    engine: str
    # Human-readable description (README first line, first file comment,
    # or a generated default).
    description: str
    # Optional external link; empty string when absent.
    link: str = ""
    # Free-form tags; defaults to an empty list.
    tags: list[str] = Field(default_factory=list)
    # Component files (setup scripts, sources, CSV data).
    components: list[ImportFile]
25
+
26
+
27
class StoreModelIndex(BaseModel):
    """Individual model entry in the store index."""

    # Display name of the model.
    name: str
    # URL of the model's full import definition (…/models/<name>.json).
    url: str
32
+
33
+
34
class StoreIndex(BaseModel):
    """Store index containing list of available models."""

    # Name of the store.
    name: str
    # Entries for every model the server exposes.
    models: list[StoreModelIndex]
@@ -0,0 +1,131 @@
1
+ import traceback
2
+ from datetime import datetime
3
+ from typing import Any, Union
4
+
5
+ from trilogy import Executor
6
+ from trilogy.core.statements.execute import PROCESSED_STATEMENT_TYPES
7
+ from trilogy.scripts.display import (
8
+ FETCH_LIMIT,
9
+ ResultSet,
10
+ create_progress_context,
11
+ print_error,
12
+ print_info,
13
+ print_results_table,
14
+ show_statement_result,
15
+ show_statement_type,
16
+ )
17
+
18
+
19
def get_statement_type(statement: PROCESSED_STATEMENT_TYPES) -> str:
    """Return the concrete class name of the given statement."""
    return statement.__class__.__name__
22
+
23
+
24
def execute_single_statement(
    exec: Executor,
    query: PROCESSED_STATEMENT_TYPES,
    idx: int,
    total_queries: int,
    use_progress: bool = False,
) -> tuple[bool, ResultSet | None, Any, Exception | None]:
    """Execute a single statement and handle results/errors consistently.

    Args:
        exec: Executor used to run the statement.
        query: Processed statement to execute.
        idx: Zero-based index of this statement within the batch.
        total_queries: Total number of statements in the batch.
        use_progress: When True, suppress per-statement console output so a
            progress bar can own the display.

    Returns:
        Tuple of (success flag, ResultSet or None, elapsed duration,
        exception or None). On failure the exception is returned rather
        than raised, so callers decide how to report it.
    """
    # Log the statement type before execution
    statement_type = get_statement_type(query)
    if not use_progress:  # Only show type when not using progress bar
        show_statement_type(idx, total_queries, statement_type)

    start_time = datetime.now()

    try:
        raw_results = exec.execute_statement(query)
        # Fetch one row beyond the display limit — presumably so the result
        # printer can detect truncation (TODO confirm against
        # print_results_table). Statements with no cursor yield None.
        results = (
            ResultSet(
                rows=raw_results.fetchmany(FETCH_LIMIT + 1), columns=raw_results.keys()
            )
            if raw_results
            else None
        )
        duration = datetime.now() - start_time

        if not use_progress:
            show_statement_result(idx, total_queries, duration, bool(results))

        return True, results, duration, None

    except Exception as e:
        duration = datetime.now() - start_time

        if not use_progress:
            show_statement_result(idx, total_queries, duration, False, str(e), type(e))

        return False, None, duration, e
62
+
63
+
64
def execute_queries_with_progress(
    exec: Executor, queries: list[PROCESSED_STATEMENT_TYPES]
) -> Exception | None:
    """Execute queries with a Rich progress bar, stopping at the first failure.

    Result tables and errors are buffered and printed only after the
    progress display closes, so they don't interleave with the live bar.

    Returns:
        The first exception encountered, or None if every statement
        succeeded.
    """
    progress = create_progress_context()
    results_to_print = []
    exception = None

    with progress:
        task = progress.add_task("Executing statements...", total=len(queries))

        for idx, query in enumerate(queries):
            statement_type = get_statement_type(query)
            progress.update(
                task, description=f"Statement {idx+1}/{len(queries)} ({statement_type})"
            )

            success, results, duration, error = execute_single_statement(
                exec, query, idx, len(queries), use_progress=True
            )

            if not success:
                exception = error

            # Store results for printing after progress is done
            results_to_print.append(
                (idx, len(queries), duration, success, results, error)
            )
            progress.advance(task)
            # Stop executing the batch on the first failure.
            if exception:
                break

    # Print all results after progress bar is finished
    for idx, total_queries, duration, success, results, error in results_to_print:
        if error:
            show_statement_result(
                idx, total_queries, duration, False, str(error), type(error)
            )
            # Bug fix: the exception was caught (and its context cleared)
            # inside execute_single_statement, so traceback.format_exc()
            # here printed "NoneType: None". Format the stored exception
            # object instead.
            print_error(
                "Full traceback:\n" + "".join(traceback.format_exception(error))
            )
        else:
            show_statement_result(idx, total_queries, duration, bool(results))
        if results and not error:
            print_results_table(results)

    return exception
109
+
110
+
111
def execute_queries_simple(
    exec: Executor, queries: list[PROCESSED_STATEMENT_TYPES]
) -> Exception | None:
    """Execute queries sequentially with plain console output.

    Unlike execute_queries_with_progress, execution continues past a
    failing statement (NOTE(review): presumably deliberate for
    non-interactive runs, but it also means the LAST error wins — confirm
    this asymmetry is intended).

    Returns:
        The exception from the most recent failing statement, or None if
        all statements succeeded.
    """
    exception = None

    for idx, query in enumerate(queries):
        # Only announce progress when there is more than one statement.
        if len(queries) > 1:
            print_info(f"Executing statement {idx+1} of {len(queries)}...")

        success, results, duration, error = execute_single_statement(
            exec, query, idx, len(queries), use_progress=False
        )

        if not success:
            exception = error

        if results and not error:
            print_results_table(results)

    return exception
@@ -0,0 +1,143 @@
1
+ """Testing commands (integration and unit) for Trilogy CLI."""
2
+
3
+ from pathlib import Path as PathlibPath
4
+
5
+ from click import UNPROCESSED, Path, argument, option, pass_context
6
+ from click.exceptions import Exit
7
+
8
+ from trilogy import Executor
9
+ from trilogy.dialect.enums import Dialects
10
+ from trilogy.scripts.common import (
11
+ CLIRuntimeParams,
12
+ ExecutionStats,
13
+ count_statement_stats,
14
+ handle_execution_exception,
15
+ validate_datasources,
16
+ )
17
+ from trilogy.scripts.dependency import ScriptNode
18
+ from trilogy.scripts.parallel_execution import ExecutionMode, run_parallel_execution
19
+
20
+
21
def _run_script_checks(
    exec: Executor, node: ScriptNode, mock: bool, quiet: bool
) -> ExecutionStats:
    """Shared parse + datasource-validation flow for the test commands.

    Parses the script at ``node.path``, validates datasources (mocked or
    live depending on ``mock``), and records how many datasources were
    checked on the returned stats.
    """
    with open(node.path, "r") as f:
        queries = exec.parse_text(f.read())
    stats = count_statement_stats(queries)
    validate_datasources(exec, mock=mock, quiet=quiet)
    # Count datasources validated
    stats.validate_count = len(exec.environment.datasources)
    return stats


def execute_script_for_integration(
    exec: Executor, node: ScriptNode, quiet: bool = False
) -> ExecutionStats:
    """Execute a script for the 'integration' command (parse + validate)."""
    return _run_script_checks(exec, node, mock=False, quiet=quiet)


def execute_script_for_unit(
    exec: Executor, node: ScriptNode, quiet: bool = False
) -> ExecutionStats:
    """Execute a script for the 'unit' command (parse + mock validate)."""
    return _run_script_checks(exec, node, mock=True, quiet=quiet)
45
+
46
+
47
+ @argument("input", type=Path())
48
+ @argument("dialect", type=str, required=False)
49
+ @option("--param", multiple=True, help="Environment parameters as key=value pairs")
50
+ @option(
51
+ "--parallelism",
52
+ "-p",
53
+ default=None,
54
+ help="Maximum parallel workers for directory execution",
55
+ )
56
+ @option(
57
+ "--config", type=Path(exists=True), help="Path to trilogy.toml configuration file"
58
+ )
59
+ @option(
60
+ "--env",
61
+ "-e",
62
+ multiple=True,
63
+ help="Set environment variables as KEY=VALUE pairs",
64
+ )
65
+ @argument("conn_args", nargs=-1, type=UNPROCESSED)
66
+ @pass_context
67
+ def integration(
68
+ ctx,
69
+ input,
70
+ dialect: str | None,
71
+ param,
72
+ parallelism: int | None,
73
+ config,
74
+ env,
75
+ conn_args,
76
+ ):
77
+ """Run integration tests on Trilogy scripts."""
78
+ cli_params = CLIRuntimeParams(
79
+ input=input,
80
+ dialect=Dialects(dialect) if dialect else None,
81
+ parallelism=parallelism,
82
+ param=param,
83
+ conn_args=conn_args,
84
+ debug=ctx.obj["DEBUG"],
85
+ config_path=PathlibPath(config) if config else None,
86
+ execution_strategy="eager_bfs",
87
+ env=env,
88
+ )
89
+
90
+ try:
91
+ run_parallel_execution(
92
+ cli_params=cli_params,
93
+ execution_fn=execute_script_for_integration,
94
+ execution_mode=ExecutionMode.INTEGRATION,
95
+ )
96
+ except Exit:
97
+ raise
98
+ except Exception as e:
99
+ handle_execution_exception(e, debug=cli_params.debug)
100
+
101
+
102
+ @argument("input", type=Path())
103
+ @option("--param", multiple=True, help="Environment parameters as key=value pairs")
104
+ @option(
105
+ "--parallelism",
106
+ "-p",
107
+ default=None,
108
+ help="Maximum parallel workers for directory execution",
109
+ )
110
+ @option(
111
+ "--config", type=Path(exists=True), help="Path to trilogy.toml configuration file"
112
+ )
113
+ @pass_context
114
+ def unit(
115
+ ctx,
116
+ input,
117
+ param,
118
+ parallelism: int | None,
119
+ config,
120
+ ):
121
+ """Run unit tests on Trilogy scripts with mocked datasources."""
122
+ # Build CLI runtime params (unit tests always use DuckDB)
123
+ cli_params = CLIRuntimeParams(
124
+ input=input,
125
+ dialect=Dialects.DUCK_DB,
126
+ parallelism=parallelism,
127
+ param=param,
128
+ conn_args=(),
129
+ debug=ctx.obj["DEBUG"],
130
+ config_path=PathlibPath(config) if config else None,
131
+ execution_strategy="eager_bfs",
132
+ )
133
+
134
+ try:
135
+ run_parallel_execution(
136
+ cli_params=cli_params,
137
+ execution_fn=execute_script_for_unit,
138
+ execution_mode=ExecutionMode.UNIT,
139
+ )
140
+ except Exit:
141
+ raise
142
+ except Exception as e:
143
+ handle_execution_exception(e, debug=cli_params.debug)