pytrilogy 0.3.142__cp312-cp312-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- LICENSE.md +19 -0
- _preql_import_resolver/__init__.py +5 -0
- _preql_import_resolver/_preql_import_resolver.cp312-win_amd64.pyd +0 -0
- pytrilogy-0.3.142.dist-info/METADATA +555 -0
- pytrilogy-0.3.142.dist-info/RECORD +200 -0
- pytrilogy-0.3.142.dist-info/WHEEL +4 -0
- pytrilogy-0.3.142.dist-info/entry_points.txt +2 -0
- pytrilogy-0.3.142.dist-info/licenses/LICENSE.md +19 -0
- trilogy/__init__.py +16 -0
- trilogy/ai/README.md +10 -0
- trilogy/ai/__init__.py +19 -0
- trilogy/ai/constants.py +92 -0
- trilogy/ai/conversation.py +107 -0
- trilogy/ai/enums.py +7 -0
- trilogy/ai/execute.py +50 -0
- trilogy/ai/models.py +34 -0
- trilogy/ai/prompts.py +100 -0
- trilogy/ai/providers/__init__.py +0 -0
- trilogy/ai/providers/anthropic.py +106 -0
- trilogy/ai/providers/base.py +24 -0
- trilogy/ai/providers/google.py +146 -0
- trilogy/ai/providers/openai.py +89 -0
- trilogy/ai/providers/utils.py +68 -0
- trilogy/authoring/README.md +3 -0
- trilogy/authoring/__init__.py +148 -0
- trilogy/constants.py +113 -0
- trilogy/core/README.md +52 -0
- trilogy/core/__init__.py +0 -0
- trilogy/core/constants.py +6 -0
- trilogy/core/enums.py +443 -0
- trilogy/core/env_processor.py +120 -0
- trilogy/core/environment_helpers.py +320 -0
- trilogy/core/ergonomics.py +193 -0
- trilogy/core/exceptions.py +123 -0
- trilogy/core/functions.py +1227 -0
- trilogy/core/graph_models.py +139 -0
- trilogy/core/internal.py +85 -0
- trilogy/core/models/__init__.py +0 -0
- trilogy/core/models/author.py +2669 -0
- trilogy/core/models/build.py +2521 -0
- trilogy/core/models/build_environment.py +180 -0
- trilogy/core/models/core.py +501 -0
- trilogy/core/models/datasource.py +322 -0
- trilogy/core/models/environment.py +751 -0
- trilogy/core/models/execute.py +1177 -0
- trilogy/core/optimization.py +251 -0
- trilogy/core/optimizations/__init__.py +12 -0
- trilogy/core/optimizations/base_optimization.py +17 -0
- trilogy/core/optimizations/hide_unused_concept.py +47 -0
- trilogy/core/optimizations/inline_datasource.py +102 -0
- trilogy/core/optimizations/predicate_pushdown.py +245 -0
- trilogy/core/processing/README.md +94 -0
- trilogy/core/processing/READMEv2.md +121 -0
- trilogy/core/processing/VIRTUAL_UNNEST.md +30 -0
- trilogy/core/processing/__init__.py +0 -0
- trilogy/core/processing/concept_strategies_v3.py +508 -0
- trilogy/core/processing/constants.py +15 -0
- trilogy/core/processing/discovery_node_factory.py +451 -0
- trilogy/core/processing/discovery_utility.py +548 -0
- trilogy/core/processing/discovery_validation.py +167 -0
- trilogy/core/processing/graph_utils.py +43 -0
- trilogy/core/processing/node_generators/README.md +9 -0
- trilogy/core/processing/node_generators/__init__.py +31 -0
- trilogy/core/processing/node_generators/basic_node.py +160 -0
- trilogy/core/processing/node_generators/common.py +268 -0
- trilogy/core/processing/node_generators/constant_node.py +38 -0
- trilogy/core/processing/node_generators/filter_node.py +315 -0
- trilogy/core/processing/node_generators/group_node.py +213 -0
- trilogy/core/processing/node_generators/group_to_node.py +117 -0
- trilogy/core/processing/node_generators/multiselect_node.py +205 -0
- trilogy/core/processing/node_generators/node_merge_node.py +653 -0
- trilogy/core/processing/node_generators/recursive_node.py +88 -0
- trilogy/core/processing/node_generators/rowset_node.py +165 -0
- trilogy/core/processing/node_generators/select_helpers/__init__.py +0 -0
- trilogy/core/processing/node_generators/select_helpers/datasource_injection.py +261 -0
- trilogy/core/processing/node_generators/select_merge_node.py +748 -0
- trilogy/core/processing/node_generators/select_node.py +95 -0
- trilogy/core/processing/node_generators/synonym_node.py +98 -0
- trilogy/core/processing/node_generators/union_node.py +91 -0
- trilogy/core/processing/node_generators/unnest_node.py +182 -0
- trilogy/core/processing/node_generators/window_node.py +201 -0
- trilogy/core/processing/nodes/README.md +28 -0
- trilogy/core/processing/nodes/__init__.py +179 -0
- trilogy/core/processing/nodes/base_node.py +519 -0
- trilogy/core/processing/nodes/filter_node.py +75 -0
- trilogy/core/processing/nodes/group_node.py +194 -0
- trilogy/core/processing/nodes/merge_node.py +420 -0
- trilogy/core/processing/nodes/recursive_node.py +46 -0
- trilogy/core/processing/nodes/select_node_v2.py +242 -0
- trilogy/core/processing/nodes/union_node.py +53 -0
- trilogy/core/processing/nodes/unnest_node.py +62 -0
- trilogy/core/processing/nodes/window_node.py +56 -0
- trilogy/core/processing/utility.py +823 -0
- trilogy/core/query_processor.py +596 -0
- trilogy/core/statements/README.md +35 -0
- trilogy/core/statements/__init__.py +0 -0
- trilogy/core/statements/author.py +536 -0
- trilogy/core/statements/build.py +0 -0
- trilogy/core/statements/common.py +20 -0
- trilogy/core/statements/execute.py +155 -0
- trilogy/core/table_processor.py +66 -0
- trilogy/core/utility.py +8 -0
- trilogy/core/validation/README.md +46 -0
- trilogy/core/validation/__init__.py +0 -0
- trilogy/core/validation/common.py +161 -0
- trilogy/core/validation/concept.py +146 -0
- trilogy/core/validation/datasource.py +227 -0
- trilogy/core/validation/environment.py +73 -0
- trilogy/core/validation/fix.py +256 -0
- trilogy/dialect/__init__.py +32 -0
- trilogy/dialect/base.py +1392 -0
- trilogy/dialect/bigquery.py +308 -0
- trilogy/dialect/common.py +147 -0
- trilogy/dialect/config.py +144 -0
- trilogy/dialect/dataframe.py +50 -0
- trilogy/dialect/duckdb.py +231 -0
- trilogy/dialect/enums.py +147 -0
- trilogy/dialect/metadata.py +173 -0
- trilogy/dialect/mock.py +190 -0
- trilogy/dialect/postgres.py +117 -0
- trilogy/dialect/presto.py +110 -0
- trilogy/dialect/results.py +89 -0
- trilogy/dialect/snowflake.py +129 -0
- trilogy/dialect/sql_server.py +137 -0
- trilogy/engine.py +48 -0
- trilogy/execution/config.py +75 -0
- trilogy/executor.py +568 -0
- trilogy/hooks/__init__.py +4 -0
- trilogy/hooks/base_hook.py +40 -0
- trilogy/hooks/graph_hook.py +139 -0
- trilogy/hooks/query_debugger.py +166 -0
- trilogy/metadata/__init__.py +0 -0
- trilogy/parser.py +10 -0
- trilogy/parsing/README.md +21 -0
- trilogy/parsing/__init__.py +0 -0
- trilogy/parsing/common.py +1069 -0
- trilogy/parsing/config.py +5 -0
- trilogy/parsing/exceptions.py +8 -0
- trilogy/parsing/helpers.py +1 -0
- trilogy/parsing/parse_engine.py +2813 -0
- trilogy/parsing/render.py +769 -0
- trilogy/parsing/trilogy.lark +540 -0
- trilogy/py.typed +0 -0
- trilogy/render.py +42 -0
- trilogy/scripts/README.md +9 -0
- trilogy/scripts/__init__.py +0 -0
- trilogy/scripts/agent.py +41 -0
- trilogy/scripts/agent_info.py +303 -0
- trilogy/scripts/common.py +355 -0
- trilogy/scripts/dependency/Cargo.lock +617 -0
- trilogy/scripts/dependency/Cargo.toml +39 -0
- trilogy/scripts/dependency/README.md +131 -0
- trilogy/scripts/dependency/build.sh +25 -0
- trilogy/scripts/dependency/src/directory_resolver.rs +177 -0
- trilogy/scripts/dependency/src/lib.rs +16 -0
- trilogy/scripts/dependency/src/main.rs +770 -0
- trilogy/scripts/dependency/src/parser.rs +435 -0
- trilogy/scripts/dependency/src/preql.pest +208 -0
- trilogy/scripts/dependency/src/python_bindings.rs +303 -0
- trilogy/scripts/dependency/src/resolver.rs +716 -0
- trilogy/scripts/dependency/tests/base.preql +3 -0
- trilogy/scripts/dependency/tests/cli_integration.rs +377 -0
- trilogy/scripts/dependency/tests/customer.preql +6 -0
- trilogy/scripts/dependency/tests/main.preql +9 -0
- trilogy/scripts/dependency/tests/orders.preql +7 -0
- trilogy/scripts/dependency/tests/test_data/base.preql +9 -0
- trilogy/scripts/dependency/tests/test_data/consumer.preql +1 -0
- trilogy/scripts/dependency.py +323 -0
- trilogy/scripts/display.py +512 -0
- trilogy/scripts/environment.py +46 -0
- trilogy/scripts/fmt.py +32 -0
- trilogy/scripts/ingest.py +471 -0
- trilogy/scripts/ingest_helpers/__init__.py +1 -0
- trilogy/scripts/ingest_helpers/foreign_keys.py +123 -0
- trilogy/scripts/ingest_helpers/formatting.py +93 -0
- trilogy/scripts/ingest_helpers/typing.py +161 -0
- trilogy/scripts/init.py +105 -0
- trilogy/scripts/parallel_execution.py +713 -0
- trilogy/scripts/plan.py +189 -0
- trilogy/scripts/run.py +63 -0
- trilogy/scripts/serve.py +140 -0
- trilogy/scripts/serve_helpers/__init__.py +41 -0
- trilogy/scripts/serve_helpers/file_discovery.py +142 -0
- trilogy/scripts/serve_helpers/index_generation.py +206 -0
- trilogy/scripts/serve_helpers/models.py +38 -0
- trilogy/scripts/single_execution.py +131 -0
- trilogy/scripts/testing.py +119 -0
- trilogy/scripts/trilogy.py +68 -0
- trilogy/std/__init__.py +0 -0
- trilogy/std/color.preql +3 -0
- trilogy/std/date.preql +13 -0
- trilogy/std/display.preql +18 -0
- trilogy/std/geography.preql +22 -0
- trilogy/std/metric.preql +15 -0
- trilogy/std/money.preql +67 -0
- trilogy/std/net.preql +14 -0
- trilogy/std/ranking.preql +7 -0
- trilogy/std/report.preql +5 -0
- trilogy/std/semantic.preql +6 -0
- trilogy/utility.py +34 -0
|
@@ -0,0 +1,303 @@
|
|
|
1
|
+
"""Agent info command - outputs AGENTS.md-style usage guide for AI agents."""
|
|
2
|
+
|
|
3
|
+
from click import pass_context
|
|
4
|
+
|
|
5
|
+
from trilogy.ai.prompts import get_trilogy_prompt
|
|
6
|
+
|
|
7
|
+
AGENT_INFO_OUTPUT = """# Trilogy CLI - AI Agent Usage Guide
|
|
8
|
+
|
|
9
|
+
## Overview
|
|
10
|
+
|
|
11
|
+
Trilogy is a semantic ETL and reporting tool providing a SQL-like language with
|
|
12
|
+
optimizations. This CLI enables workspace management, script execution, testing,
|
|
13
|
+
and data ingestion.
|
|
14
|
+
|
|
15
|
+
## Quick Start
|
|
16
|
+
|
|
17
|
+
```bash
|
|
18
|
+
# Initialize a new workspace
|
|
19
|
+
trilogy init [path]
|
|
20
|
+
|
|
21
|
+
# Run a script
|
|
22
|
+
trilogy run script.preql dialect [connection_args...]
|
|
23
|
+
|
|
24
|
+
# Run unit tests (mocked datasources)
|
|
25
|
+
trilogy unit script.preql
|
|
26
|
+
|
|
27
|
+
# Run integration tests (real connections)
|
|
28
|
+
trilogy integration script.preql dialect [connection_args...]
|
|
29
|
+
```
|
|
30
|
+
|
|
31
|
+
## Commands Reference
|
|
32
|
+
|
|
33
|
+
### trilogy init [path]
|
|
34
|
+
|
|
35
|
+
Create a new Trilogy workspace with default configuration and structure.
|
|
36
|
+
|
|
37
|
+
**Arguments:**
|
|
38
|
+
- `path` (optional): Directory to initialize (default: current directory)
|
|
39
|
+
|
|
40
|
+
**Creates:**
|
|
41
|
+
- `trilogy.toml` - Configuration file
|
|
42
|
+
- `raw/` - Directory for raw data models
|
|
43
|
+
- `derived/` - Directory for derived data models
|
|
44
|
+
- `jobs/` - Directory for job scripts
|
|
45
|
+
- `hello_world.preql` - Example script
|
|
46
|
+
|
|
47
|
+
**Example:**
|
|
48
|
+
```bash
|
|
49
|
+
trilogy init my_project
|
|
50
|
+
cd my_project
|
|
51
|
+
trilogy unit hello_world.preql
|
|
52
|
+
```
|
|
53
|
+
|
|
54
|
+
---
|
|
55
|
+
|
|
56
|
+
### trilogy run <input> [dialect] [options] [conn_args...]
|
|
57
|
+
|
|
58
|
+
Execute a Trilogy script or all scripts in a directory.
|
|
59
|
+
|
|
60
|
+
**Arguments:**
|
|
61
|
+
- `input` (required): Path to .preql file or directory
|
|
62
|
+
- `dialect` (optional): Database dialect (duckdb, postgres, snowflake, bigquery, etc.)
|
|
63
|
+
- `conn_args` (optional): Connection arguments passed to the database driver
|
|
64
|
+
|
|
65
|
+
**Options:**
|
|
66
|
+
- `--param KEY=VALUE`: Environment parameters (can be repeated)
|
|
67
|
+
- `--parallelism N`, `-p N`: Max parallel workers for directory execution
|
|
68
|
+
- `--config PATH`: Path to trilogy.toml configuration file
|
|
69
|
+
|
|
70
|
+
**Examples:**
|
|
71
|
+
```bash
|
|
72
|
+
# Run single script with DuckDB
|
|
73
|
+
trilogy run query.preql duckdb
|
|
74
|
+
|
|
75
|
+
# Run with connection string
|
|
76
|
+
trilogy run etl.preql postgres "postgresql://user:pass@host/db"
|
|
77
|
+
|
|
78
|
+
# Run directory with parallelism
|
|
79
|
+
trilogy run jobs/ duckdb -p 4
|
|
80
|
+
|
|
81
|
+
# Run with parameters
|
|
82
|
+
trilogy run report.preql duckdb --param date=2024-01-01 --param region=US
|
|
83
|
+
```
|
|
84
|
+
|
|
85
|
+
---
|
|
86
|
+
|
|
87
|
+
### trilogy unit <input> [options]
|
|
88
|
+
|
|
89
|
+
Run unit tests on Trilogy scripts with mocked datasources. Always uses DuckDB.
|
|
90
|
+
|
|
91
|
+
**Arguments:**
|
|
92
|
+
- `input` (required): Path to .preql file or directory
|
|
93
|
+
|
|
94
|
+
**Options:**
|
|
95
|
+
- `--param KEY=VALUE`: Environment parameters
|
|
96
|
+
- `--parallelism N`, `-p N`: Max parallel workers
|
|
97
|
+
- `--config PATH`: Path to trilogy.toml
|
|
98
|
+
|
|
99
|
+
**Examples:**
|
|
100
|
+
```bash
|
|
101
|
+
# Test single file
|
|
102
|
+
trilogy unit test_query.preql
|
|
103
|
+
|
|
104
|
+
# Test entire directory
|
|
105
|
+
trilogy unit tests/ -p 4
|
|
106
|
+
```
|
|
107
|
+
|
|
108
|
+
---
|
|
109
|
+
|
|
110
|
+
### trilogy integration <input> [dialect] [options] [conn_args...]
|
|
111
|
+
|
|
112
|
+
Run integration tests on Trilogy scripts with real database connections.
|
|
113
|
+
|
|
114
|
+
**Arguments:**
|
|
115
|
+
- `input` (required): Path to .preql file or directory
|
|
116
|
+
- `dialect` (optional): Database dialect
|
|
117
|
+
- `conn_args` (optional): Connection arguments
|
|
118
|
+
|
|
119
|
+
**Options:**
|
|
120
|
+
- `--param KEY=VALUE`: Environment parameters
|
|
121
|
+
- `--parallelism N`, `-p N`: Max parallel workers
|
|
122
|
+
- `--config PATH`: Path to trilogy.toml
|
|
123
|
+
|
|
124
|
+
**Examples:**
|
|
125
|
+
```bash
|
|
126
|
+
# Integration test against Postgres
|
|
127
|
+
trilogy integration tests/ postgres "postgresql://localhost/testdb"
|
|
128
|
+
```
|
|
129
|
+
|
|
130
|
+
---
|
|
131
|
+
|
|
132
|
+
### trilogy fmt <input>
|
|
133
|
+
|
|
134
|
+
Format a Trilogy script file.
|
|
135
|
+
|
|
136
|
+
**Arguments:**
|
|
137
|
+
- `input` (required): Path to .preql file to format
|
|
138
|
+
|
|
139
|
+
**Example:**
|
|
140
|
+
```bash
|
|
141
|
+
trilogy fmt messy_script.preql
|
|
142
|
+
```
|
|
143
|
+
|
|
144
|
+
---
|
|
145
|
+
|
|
146
|
+
### trilogy ingest <tables> [dialect] [options] [conn_args...]
|
|
147
|
+
|
|
148
|
+
Bootstrap datasources from existing warehouse tables. Connects to a database,
|
|
149
|
+
introspects table schemas, and generates Trilogy datasource definitions.
|
|
150
|
+
|
|
151
|
+
**Arguments:**
|
|
152
|
+
- `tables` (required): Comma-separated list of table names
|
|
153
|
+
- `dialect` (optional): Database dialect
|
|
154
|
+
- `conn_args` (optional): Connection arguments
|
|
155
|
+
|
|
156
|
+
**Options:**
|
|
157
|
+
- `--output PATH`, `-o PATH`: Output directory for generated files
|
|
158
|
+
- `--schema NAME`, `-s NAME`: Schema/database to ingest from
|
|
159
|
+
- `--config PATH`: Path to trilogy.toml
|
|
160
|
+
- `--fks SPEC`: Foreign key relationships (format: table.col:ref_table.col)
|
|
161
|
+
|
|
162
|
+
**Examples:**
|
|
163
|
+
```bash
|
|
164
|
+
# Ingest tables from DuckDB
|
|
165
|
+
trilogy ingest "users,orders,products" duckdb "path/to/db.duckdb"
|
|
166
|
+
|
|
167
|
+
# Ingest with schema and output directory
|
|
168
|
+
trilogy ingest "customers" postgres -s public -o raw/ "postgresql://localhost/db"
|
|
169
|
+
|
|
170
|
+
# Ingest with foreign key relationships
|
|
171
|
+
trilogy ingest "orders,customers" duckdb --fks "orders.customer_id:customers.id"
|
|
172
|
+
```
|
|
173
|
+
|
|
174
|
+
---
|
|
175
|
+
|
|
176
|
+
### trilogy serve <directory> [engine] [options]
|
|
177
|
+
|
|
178
|
+
Start a FastAPI server to expose Trilogy models from a directory.
|
|
179
|
+
Requires `pytrilogy[serve]` extras.
|
|
180
|
+
|
|
181
|
+
**Arguments:**
|
|
182
|
+
- `directory` (required): Directory containing model files
|
|
183
|
+
- `engine` (optional): Engine type (default: generic)
|
|
184
|
+
|
|
185
|
+
**Options:**
|
|
186
|
+
- `--port N`, `-p N`: Port number (default: 8100)
|
|
187
|
+
- `--host HOST`, `-h HOST`: Host to bind (default: 0.0.0.0)
|
|
188
|
+
- `--timeout N`, `-t N`: Shutdown after N seconds
|
|
189
|
+
|
|
190
|
+
**Endpoints exposed:**
|
|
191
|
+
- `/` - Server info
|
|
192
|
+
- `/index.json` - List of available models
|
|
193
|
+
- `/models/<name>.json` - Specific model details
|
|
194
|
+
- `/files/<name>` - Raw .preql/.sql file content
|
|
195
|
+
|
|
196
|
+
**Example:**
|
|
197
|
+
```bash
|
|
198
|
+
trilogy serve ./models/ duckdb --port 8080
|
|
199
|
+
```
|
|
200
|
+
|
|
201
|
+
---
|
|
202
|
+
|
|
203
|
+
### trilogy agent <command> [options]
|
|
204
|
+
|
|
205
|
+
Pass off a multi-step orchestration task to an AI agent. (Not yet implemented)
|
|
206
|
+
|
|
207
|
+
**Arguments:**
|
|
208
|
+
- `command` (required): Natural language command
|
|
209
|
+
|
|
210
|
+
**Options:**
|
|
211
|
+
- `--context PATH`, `-c PATH`: Additional context files
|
|
212
|
+
- `--model NAME`, `-m NAME`: AI model to use
|
|
213
|
+
- `--interactive`, `-i`: Interactive mode with feedback
|
|
214
|
+
|
|
215
|
+
---
|
|
216
|
+
|
|
217
|
+
## Configuration File (trilogy.toml)
|
|
218
|
+
|
|
219
|
+
```toml
|
|
220
|
+
[engine]
|
|
221
|
+
# Default dialect for execution
|
|
222
|
+
dialect = "duckdb"
|
|
223
|
+
|
|
224
|
+
# Max parallelism for multi-script execution
|
|
225
|
+
parallelism = 3
|
|
226
|
+
|
|
227
|
+
[setup]
|
|
228
|
+
# Startup scripts to run before execution
|
|
229
|
+
trilogy = ["setup.preql"]
|
|
230
|
+
sql = ["init.sql"]
|
|
231
|
+
```
|
|
232
|
+
|
|
233
|
+
## Supported Dialects
|
|
234
|
+
|
|
235
|
+
- `duckdb` / `duck_db` - DuckDB (default for unit tests)
|
|
236
|
+
- `postgres` / `postgresql` - PostgreSQL
|
|
237
|
+
- `bigquery` - Google BigQuery
|
|
238
|
+
- `snowflake` - Snowflake
|
|
239
|
+
- `redshift` - Amazon Redshift
|
|
240
|
+
- `trino` - Trino/Presto
|
|
241
|
+
- `sql_server` - Microsoft SQL Server
|
|
242
|
+
|
|
243
|
+
## File Types
|
|
244
|
+
|
|
245
|
+
- `.preql` - Trilogy script files (main language)
|
|
246
|
+
- `.sql` - Raw SQL files (for setup scripts)
|
|
247
|
+
- `trilogy.toml` - Configuration file
|
|
248
|
+
|
|
249
|
+
## Common Workflows
|
|
250
|
+
|
|
251
|
+
### 1. Setting up a new project
|
|
252
|
+
```bash
|
|
253
|
+
trilogy init my_analytics
|
|
254
|
+
cd my_analytics
|
|
255
|
+
# Configure trilogy.toml with your dialect and connection
|
|
256
|
+
trilogy unit hello_world.preql
|
|
257
|
+
```
|
|
258
|
+
|
|
259
|
+
### 2. Ingesting existing tables
|
|
260
|
+
```bash
|
|
261
|
+
trilogy ingest "fact_sales,dim_customers,dim_products" postgres \\
|
|
262
|
+
-s analytics -o raw/ "postgresql://localhost/warehouse"
|
|
263
|
+
```
|
|
264
|
+
|
|
265
|
+
### 3. Running ETL jobs
|
|
266
|
+
```bash
|
|
267
|
+
trilogy run jobs/ postgres -p 4 "postgresql://localhost/warehouse"
|
|
268
|
+
```
|
|
269
|
+
|
|
270
|
+
### 4. Testing before deployment
|
|
271
|
+
```bash
|
|
272
|
+
# Unit tests (fast, no connection needed)
|
|
273
|
+
trilogy unit .
|
|
274
|
+
|
|
275
|
+
# Integration tests (real connection)
|
|
276
|
+
trilogy integration . postgres "postgresql://localhost/testdb"
|
|
277
|
+
```
|
|
278
|
+
|
|
279
|
+
## Debug Mode
|
|
280
|
+
|
|
281
|
+
Add `--debug` flag to any command for verbose output:
|
|
282
|
+
```bash
|
|
283
|
+
trilogy --debug run query.preql duckdb
|
|
284
|
+
```
|
|
285
|
+
"""
|
|
286
|
+
|
|
287
|
+
|
|
288
|
+
def get_agent_info_output() -> str:
    """Assemble the full agent-facing guide: CLI docs plus the syntax reference.

    The syntax section comes from the shared prompt helper so the CLI guide
    and the AI prompts stay in sync.
    """
    intro_text = (
        "## Trilogy Language Syntax\n\nTrilogy is a SQL-inspired language "
        "with a built-in semantic layer. Use the following syntax reference "
        "when writing .preql files."
    )
    syntax_section = get_trilogy_prompt(intro=intro_text)
    return "\n".join([AGENT_INFO_OUTPUT, syntax_section])
|
|
294
|
+
|
|
295
|
+
|
|
296
|
+
@pass_context
def agent_info(ctx):
    """Print the complete CLI guide for AI agents.

    Emits an AGENTS.md-style document to stdout covering every command,
    its options, and usage examples, formatted for AI agent consumption.
    """
    print(get_agent_info_output())
|
|
@@ -0,0 +1,355 @@
|
|
|
1
|
+
"""Common helper functions used across all CLI commands."""
|
|
2
|
+
|
|
3
|
+
import traceback
|
|
4
|
+
from dataclasses import dataclass
|
|
5
|
+
from io import StringIO
|
|
6
|
+
from pathlib import Path as PathlibPath
|
|
7
|
+
from typing import Any, Iterable, Union
|
|
8
|
+
|
|
9
|
+
from click.exceptions import Exit
|
|
10
|
+
|
|
11
|
+
from trilogy import Executor
|
|
12
|
+
from trilogy.constants import DEFAULT_NAMESPACE
|
|
13
|
+
from trilogy.core.exceptions import ConfigurationException, ModelValidationError
|
|
14
|
+
from trilogy.core.models.environment import Environment
|
|
15
|
+
from trilogy.dialect.enums import Dialects
|
|
16
|
+
from trilogy.execution.config import RuntimeConfig, load_config_file
|
|
17
|
+
from trilogy.hooks.query_debugger import DebuggingHook
|
|
18
|
+
from trilogy.scripts.dependency import ScriptNode
|
|
19
|
+
from trilogy.scripts.display import (
|
|
20
|
+
print_error,
|
|
21
|
+
print_info,
|
|
22
|
+
print_success,
|
|
23
|
+
)
|
|
24
|
+
from trilogy.scripts.environment import extra_to_kwargs, parse_env_params
|
|
25
|
+
|
|
26
|
+
# Configuration file name
|
|
27
|
+
TRILOGY_CONFIG_NAME = "trilogy.toml"
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
@dataclass
class CLIRuntimeParams:
    """Parameters provided via CLI for execution."""

    # Script path, directory, or inline query text supplied on the command line.
    input: str
    # Target dialect; None means "resolve from the config file" (merge_runtime_config).
    dialect: Dialects | None = None
    # Max parallel workers for directory runs; None means "use the config file value".
    parallelism: int | None = None
    # Raw --param KEY=VALUE strings, parsed later by parse_env_params.
    param: tuple[str, ...] = ()
    # Trailing CLI args forwarded to the database driver as connection kwargs.
    conn_args: tuple[str, ...] = ()
    # When True, verbose diagnostics (e.g. DebuggingHook, full tracebacks) are enabled.
    debug: bool = False
    # Explicit trilogy.toml path; None triggers the upward directory search.
    config_path: PathlibPath | None = None
    # Scheduling strategy name for multi-script execution.
    execution_strategy: str = "eager_bfs"
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
def merge_runtime_config(
    cli_params: CLIRuntimeParams, file_config: RuntimeConfig
) -> tuple[Dialects, int]:
    """Combine CLI options with config-file settings; the CLI always wins.

    Returns:
        (dialect, parallelism) after precedence resolution.

    Raises:
        Exit: when neither the CLI nor the config file names a dialect.
    """
    # Dialect: explicit CLI value beats the config file; no value anywhere is fatal.
    dialect = cli_params.dialect or file_config.engine_dialect
    if not dialect:
        print_error(
            "No dialect specified. Provide dialect as argument or set engine.dialect in config file."
        )
        raise Exit(1)

    # Parallelism: the CLI value wins only when it was actually supplied
    # (None means "flag not given"), otherwise fall back to the config file.
    if cli_params.parallelism is None:
        parallelism = file_config.parallelism
    else:
        parallelism = cli_params.parallelism

    return dialect, parallelism
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
def find_trilogy_config(start_path: PathlibPath | None = None) -> PathlibPath | None:
    """Locate the nearest trilogy.toml by walking up from *start_path*.

    Args:
        start_path: Directory (or file, whose parent is used) to start from.
            Defaults to the current working directory.

    Returns:
        The first trilogy.toml found walking toward the filesystem root,
        or None when no config file exists along that path.
    """
    base = start_path if start_path else PathlibPath.cwd()
    # Normalize a file path to its containing directory before the walk.
    if not base.is_dir():
        base = base.parent

    for directory in (base, *base.parents):
        candidate = directory / TRILOGY_CONFIG_NAME
        if candidate.exists():
            return candidate
    return None
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
def resolve_input(path: PathlibPath) -> list[PathlibPath]:
    """Expand *path* into the list of .preql files it denotes.

    A directory yields every .preql file beneath it (recursive, sorted);
    an existing file yields a single-element list containing itself.

    Raises:
        FileNotFoundError: when *path* does not exist.
    """
    # Directory: pick up nested model folders via a recursive glob.
    if path.is_dir():
        return sorted(path.glob("**/*.preql"))
    # Single file (is_file() is False for nonexistent paths).
    if path.exists() and path.is_file():
        return [path]

    raise FileNotFoundError(f"Input path '{path}' does not exist.")
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
def get_runtime_config(
    path: PathlibPath, config_override: PathlibPath | None = None
) -> RuntimeConfig:
    """Load the runtime config for *path*, honoring an explicit override.

    When no trilogy.toml is found, an empty RuntimeConfig is returned.
    When a located file fails to parse, the error is reported and the CLI
    exits via handle_execution_exception.
    """
    # An explicit override skips the upward directory search entirely.
    config_path = config_override if config_override else find_trilogy_config(path)

    if not config_path:
        return RuntimeConfig(startup_trilogy=[], startup_sql=[])

    try:
        return load_config_file(config_path)
    except Exception as e:
        print_error(f"Failed to load configuration file {config_path}: {e}")
        handle_execution_exception(e)
        # Unreachable: handle_execution_exception always raises Exit.
        return RuntimeConfig(startup_trilogy=[], startup_sql=[])
|
|
131
|
+
|
|
132
|
+
|
|
133
|
+
def resolve_input_information(
    input: str, config_path_input: PathlibPath | None = None
) -> tuple[Iterable[PathlibPath | StringIO], PathlibPath, str, str, RuntimeConfig]:
    """Classify *input* as a directory, file, or inline query and gather context.

    Returns:
        (files, working_directory, input_type, input_name, runtime_config)
        where input_type is one of "directory", "file", or "query".
    """
    candidate = PathlibPath(input)
    files: Iterable[StringIO | PathlibPath]

    if not candidate.exists():
        # Not a filesystem path: treat the raw text as an inline query.
        return (
            [StringIO(input)],
            PathlibPath.cwd(),
            "query",
            "inline",
            RuntimeConfig(startup_trilogy=[], startup_sql=[]),
        )

    files = resolve_input(candidate)

    if candidate.is_dir():
        directory, input_type = candidate, "directory"
    else:
        # For a single file, the working directory is its parent folder.
        directory, input_type = candidate.parent, "file"
    config = get_runtime_config(candidate, config_path_input)

    return files, directory, input_type, candidate.name, config
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
def validate_required_connection_params(
    conn_dict: dict[str, Any],
    required_keys: list[str],
    optional_keys: list[str],
    dialect_name: str,
) -> dict:
    """Check connection kwargs against a dialect's required/optional keys.

    Missing required keys abort; unrecognized keys trigger a warning and are
    dropped from the result.

    Returns:
        The filtered dict containing only recognized (required or optional) keys.

    Raises:
        ConfigurationException: when any required key is absent.
    """
    missing = [key for key in required_keys if key not in conn_dict]
    if missing:
        raise ConfigurationException(
            f"Missing required {dialect_name} connection parameters: {', '.join(missing)}"
        )

    known = set(required_keys) | set(optional_keys)
    extra = [key for key in conn_dict if key not in known]
    if extra:
        print(
            f"Warning: Extra {dialect_name} connection parameters provided: {', '.join(extra)}"
        )
    return {k: v for k, v in conn_dict.items() if k in known}
|
|
186
|
+
|
|
187
|
+
|
|
188
|
+
def get_dialect_config(
    edialect: Dialects, conn_dict: dict[str, Any], runtime_config: RuntimeConfig
) -> Any:
    """Get dialect configuration based on dialect type.

    Validates the CLI-supplied connection kwargs against the chosen dialect's
    expected parameter names, builds the dialect-specific config object, and
    overlays any engine config from the file-based runtime configuration.

    Args:
        edialect: The resolved target dialect.
        conn_dict: Connection kwargs parsed from trailing CLI arguments.
        runtime_config: File-based runtime configuration (may carry engine config).

    Returns:
        A dialect config object, or None for dialects with no branch below.

    Raises:
        ConfigurationException: via validate_required_connection_params when
            a required connection parameter is missing.
    """
    conf: Union[Any, None] = None

    # Config classes are imported lazily inside each branch so only the
    # dialect actually in use pulls in its module.
    if edialect == Dialects.DUCK_DB:
        from trilogy.dialect.config import DuckDBConfig

        conn_dict = validate_required_connection_params(
            conn_dict, [], ["path"], "DuckDB"
        )
        conf = DuckDBConfig(**conn_dict)
    elif edialect == Dialects.SNOWFLAKE:
        from trilogy.dialect.config import SnowflakeConfig

        conn_dict = validate_required_connection_params(
            conn_dict, ["username", "password", "account"], [], "Snowflake"
        )
        conf = SnowflakeConfig(**conn_dict)
    elif edialect == Dialects.SQL_SERVER:
        from trilogy.dialect.config import SQLServerConfig

        conn_dict = validate_required_connection_params(
            conn_dict,
            ["host", "port", "username", "password", "database"],
            [],
            "SQL Server",
        )
        conf = SQLServerConfig(**conn_dict)
    elif edialect == Dialects.POSTGRES:
        from trilogy.dialect.config import PostgresConfig

        conn_dict = validate_required_connection_params(
            conn_dict,
            ["host", "port", "username", "password", "database"],
            [],
            "Postgres",
        )
        conf = PostgresConfig(**conn_dict)
    elif edialect == Dialects.BIGQUERY:
        from trilogy.dialect.config import BigQueryConfig

        conn_dict = validate_required_connection_params(
            conn_dict, [], ["project"], "BigQuery"
        )
        conf = BigQueryConfig(**conn_dict)
    elif edialect == Dialects.PRESTO:
        from trilogy.dialect.config import PrestoConfig

        conn_dict = validate_required_connection_params(
            conn_dict,
            ["host", "port", "username", "password", "catalog"],
            [],
            "Presto",
        )
        conf = PrestoConfig(**conn_dict)
    # Merge engine-level settings from trilogy.toml onto the CLI-derived config.
    if conf and runtime_config.engine_config:
        conf = runtime_config.engine_config.merge_config(conf)
    return conf
|
|
248
|
+
|
|
249
|
+
|
|
250
|
+
def create_executor(
    param: tuple[str, ...],
    directory: PathlibPath,
    conn_args: Iterable[str],
    edialect: Dialects,
    debug: bool,
    config: RuntimeConfig,
) -> Executor:
    """Build a fully configured Executor for a working directory.

    Parses --param KEY=VALUE pairs, converts trailing CLI args into
    connection kwargs, constructs the dialect engine, and then runs any
    startup SQL / Trilogy scripts declared in the runtime config.

    Args:
        param: Raw --param flag values.
        directory: Working path for the new Environment.
        conn_args: Trailing CLI args forwarded as connection kwargs.
        edialect: Resolved target dialect.
        debug: When True, attach a DebuggingHook and surface full tracebacks.
        config: File-based runtime configuration (startup scripts, engine config).

    Raises:
        Exit: on invalid --param values or dialect configuration failure.
    """
    # Parse environment parameters from dedicated flag
    namespace = DEFAULT_NAMESPACE
    try:
        env_params = parse_env_params(param)
        from trilogy.scripts.display import show_environment_params

        show_environment_params(env_params)
    except ValueError as e:
        print_error(str(e))
        raise Exit(1) from e

    # Parse connection arguments from remaining args
    conn_dict = extra_to_kwargs(conn_args)

    # Configure dialect
    try:
        conf = get_dialect_config(edialect, conn_dict, runtime_config=config)
    except Exception as e:
        # Fix: forward the in-scope debug flag so --debug prints the full
        # traceback here too (previously it was silently dropped).
        handle_execution_exception(e, debug=debug)
        raise  # unreachable; handle_execution_exception always raises Exit

    # Create environment and set additional parameters if any exist
    environment = Environment(working_path=str(directory), namespace=namespace)
    if env_params:
        environment.set_parameters(**env_params)

    # Renamed from `exec` to avoid shadowing the builtin.
    executor = Executor(
        dialect=edialect,
        engine=edialect.default_engine(conf=conf),
        environment=environment,
        hooks=[DebuggingHook()] if debug else [],
    )
    # Run configured startup scripts: raw SQL first, then Trilogy.
    if config.startup_sql:
        for script in config.startup_sql:
            print_info(f"Executing startup SQL script: {script}")
            executor.execute_file(script)
    if config.startup_trilogy:
        for script in config.startup_trilogy:
            print_info(f"Executing startup Trilogy script: {script}")
            executor.execute_file(script)
    return executor
|
|
298
|
+
|
|
299
|
+
|
|
300
|
+
def create_executor_for_script(
    node: ScriptNode,
    param: tuple[str, ...],
    conn_args: Iterable[str],
    edialect: Dialects,
    debug: bool,
    config: RuntimeConfig,
) -> Executor:
    """Build an Executor scoped to a single script node.

    Each script gets its own executor and environment; the script's parent
    directory becomes the working path so relative references resolve next
    to the file itself.
    """
    return create_executor(
        param, node.path.parent, conn_args, edialect, debug, config
    )
|
|
316
|
+
|
|
317
|
+
|
|
318
|
+
def validate_datasources(
    exec: Executor, mock: bool = False, quiet: bool = False
) -> None:
    """Validate every datasource known to the executor's environment.

    Args:
        exec: The executor instance.
        mock: When True, mock the datasources before validating (unit-test mode).
        quiet: When True, suppress the "nothing to test" notice (parallel runs).

    Raises:
        Exit: when validation reports errors.
    """
    names = exec.environment.datasources.keys()
    if not names:
        if not quiet:
            mode = "unit" if mock else "integration"
            print_success(f"No datasources found to {mode} test.")
        return

    joined = ", ".join(names)
    if mock:
        exec.execute_text("mock datasources {};".format(joined))

    try:
        exec.execute_text("validate datasources {};".format(joined))
    except ModelValidationError as e:
        # A childless error carries its own message; otherwise each child
        # failure is reported individually.
        if not e.children:
            print_error(f"Datasource validation failed: {e.message}")
        for idx, child in enumerate(e.children or []):
            print_error(f"Error {idx + 1}: {child.message}")
        raise Exit(1) from e
|
|
349
|
+
|
|
350
|
+
|
|
351
|
+
def handle_execution_exception(e: Exception, debug: bool = False) -> None:
    """Report an unexpected error and terminate the CLI with exit code 1.

    Args:
        e: The exception being reported.
        debug: When True, also print the full traceback.

    Raises:
        Exit: always; this function never returns normally.
    """
    messages = [f"Unexpected error: {e}"]
    if debug:
        messages.append(f"Full traceback:\n{traceback.format_exc()}")
    for message in messages:
        print_error(message)
    raise Exit(1) from e
|