pytrilogy 0.3.138__cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (182)
  1. LICENSE.md +19 -0
  2. _preql_import_resolver/__init__.py +5 -0
  3. _preql_import_resolver/_preql_import_resolver.cpython-311-x86_64-linux-gnu.so +0 -0
  4. pytrilogy-0.3.138.dist-info/METADATA +525 -0
  5. pytrilogy-0.3.138.dist-info/RECORD +182 -0
  6. pytrilogy-0.3.138.dist-info/WHEEL +5 -0
  7. pytrilogy-0.3.138.dist-info/entry_points.txt +2 -0
  8. pytrilogy-0.3.138.dist-info/licenses/LICENSE.md +19 -0
  9. trilogy/__init__.py +9 -0
  10. trilogy/ai/README.md +10 -0
  11. trilogy/ai/__init__.py +19 -0
  12. trilogy/ai/constants.py +92 -0
  13. trilogy/ai/conversation.py +107 -0
  14. trilogy/ai/enums.py +7 -0
  15. trilogy/ai/execute.py +50 -0
  16. trilogy/ai/models.py +34 -0
  17. trilogy/ai/prompts.py +87 -0
  18. trilogy/ai/providers/__init__.py +0 -0
  19. trilogy/ai/providers/anthropic.py +106 -0
  20. trilogy/ai/providers/base.py +24 -0
  21. trilogy/ai/providers/google.py +146 -0
  22. trilogy/ai/providers/openai.py +89 -0
  23. trilogy/ai/providers/utils.py +68 -0
  24. trilogy/authoring/README.md +3 -0
  25. trilogy/authoring/__init__.py +143 -0
  26. trilogy/constants.py +113 -0
  27. trilogy/core/README.md +52 -0
  28. trilogy/core/__init__.py +0 -0
  29. trilogy/core/constants.py +6 -0
  30. trilogy/core/enums.py +443 -0
  31. trilogy/core/env_processor.py +120 -0
  32. trilogy/core/environment_helpers.py +320 -0
  33. trilogy/core/ergonomics.py +193 -0
  34. trilogy/core/exceptions.py +123 -0
  35. trilogy/core/functions.py +1227 -0
  36. trilogy/core/graph_models.py +139 -0
  37. trilogy/core/internal.py +85 -0
  38. trilogy/core/models/__init__.py +0 -0
  39. trilogy/core/models/author.py +2672 -0
  40. trilogy/core/models/build.py +2521 -0
  41. trilogy/core/models/build_environment.py +180 -0
  42. trilogy/core/models/core.py +494 -0
  43. trilogy/core/models/datasource.py +322 -0
  44. trilogy/core/models/environment.py +748 -0
  45. trilogy/core/models/execute.py +1177 -0
  46. trilogy/core/optimization.py +251 -0
  47. trilogy/core/optimizations/__init__.py +12 -0
  48. trilogy/core/optimizations/base_optimization.py +17 -0
  49. trilogy/core/optimizations/hide_unused_concept.py +47 -0
  50. trilogy/core/optimizations/inline_datasource.py +102 -0
  51. trilogy/core/optimizations/predicate_pushdown.py +245 -0
  52. trilogy/core/processing/README.md +94 -0
  53. trilogy/core/processing/READMEv2.md +121 -0
  54. trilogy/core/processing/VIRTUAL_UNNEST.md +30 -0
  55. trilogy/core/processing/__init__.py +0 -0
  56. trilogy/core/processing/concept_strategies_v3.py +508 -0
  57. trilogy/core/processing/constants.py +15 -0
  58. trilogy/core/processing/discovery_node_factory.py +451 -0
  59. trilogy/core/processing/discovery_utility.py +517 -0
  60. trilogy/core/processing/discovery_validation.py +167 -0
  61. trilogy/core/processing/graph_utils.py +43 -0
  62. trilogy/core/processing/node_generators/README.md +9 -0
  63. trilogy/core/processing/node_generators/__init__.py +31 -0
  64. trilogy/core/processing/node_generators/basic_node.py +160 -0
  65. trilogy/core/processing/node_generators/common.py +268 -0
  66. trilogy/core/processing/node_generators/constant_node.py +38 -0
  67. trilogy/core/processing/node_generators/filter_node.py +315 -0
  68. trilogy/core/processing/node_generators/group_node.py +213 -0
  69. trilogy/core/processing/node_generators/group_to_node.py +117 -0
  70. trilogy/core/processing/node_generators/multiselect_node.py +205 -0
  71. trilogy/core/processing/node_generators/node_merge_node.py +653 -0
  72. trilogy/core/processing/node_generators/recursive_node.py +88 -0
  73. trilogy/core/processing/node_generators/rowset_node.py +165 -0
  74. trilogy/core/processing/node_generators/select_helpers/__init__.py +0 -0
  75. trilogy/core/processing/node_generators/select_helpers/datasource_injection.py +261 -0
  76. trilogy/core/processing/node_generators/select_merge_node.py +748 -0
  77. trilogy/core/processing/node_generators/select_node.py +95 -0
  78. trilogy/core/processing/node_generators/synonym_node.py +98 -0
  79. trilogy/core/processing/node_generators/union_node.py +91 -0
  80. trilogy/core/processing/node_generators/unnest_node.py +182 -0
  81. trilogy/core/processing/node_generators/window_node.py +201 -0
  82. trilogy/core/processing/nodes/README.md +28 -0
  83. trilogy/core/processing/nodes/__init__.py +179 -0
  84. trilogy/core/processing/nodes/base_node.py +519 -0
  85. trilogy/core/processing/nodes/filter_node.py +75 -0
  86. trilogy/core/processing/nodes/group_node.py +194 -0
  87. trilogy/core/processing/nodes/merge_node.py +420 -0
  88. trilogy/core/processing/nodes/recursive_node.py +46 -0
  89. trilogy/core/processing/nodes/select_node_v2.py +242 -0
  90. trilogy/core/processing/nodes/union_node.py +53 -0
  91. trilogy/core/processing/nodes/unnest_node.py +62 -0
  92. trilogy/core/processing/nodes/window_node.py +56 -0
  93. trilogy/core/processing/utility.py +823 -0
  94. trilogy/core/query_processor.py +596 -0
  95. trilogy/core/statements/README.md +35 -0
  96. trilogy/core/statements/__init__.py +0 -0
  97. trilogy/core/statements/author.py +536 -0
  98. trilogy/core/statements/build.py +0 -0
  99. trilogy/core/statements/common.py +20 -0
  100. trilogy/core/statements/execute.py +155 -0
  101. trilogy/core/table_processor.py +66 -0
  102. trilogy/core/utility.py +8 -0
  103. trilogy/core/validation/README.md +46 -0
  104. trilogy/core/validation/__init__.py +0 -0
  105. trilogy/core/validation/common.py +161 -0
  106. trilogy/core/validation/concept.py +146 -0
  107. trilogy/core/validation/datasource.py +227 -0
  108. trilogy/core/validation/environment.py +73 -0
  109. trilogy/core/validation/fix.py +106 -0
  110. trilogy/dialect/__init__.py +32 -0
  111. trilogy/dialect/base.py +1359 -0
  112. trilogy/dialect/bigquery.py +256 -0
  113. trilogy/dialect/common.py +147 -0
  114. trilogy/dialect/config.py +144 -0
  115. trilogy/dialect/dataframe.py +50 -0
  116. trilogy/dialect/duckdb.py +177 -0
  117. trilogy/dialect/enums.py +147 -0
  118. trilogy/dialect/metadata.py +173 -0
  119. trilogy/dialect/mock.py +190 -0
  120. trilogy/dialect/postgres.py +91 -0
  121. trilogy/dialect/presto.py +104 -0
  122. trilogy/dialect/results.py +89 -0
  123. trilogy/dialect/snowflake.py +90 -0
  124. trilogy/dialect/sql_server.py +92 -0
  125. trilogy/engine.py +48 -0
  126. trilogy/execution/config.py +75 -0
  127. trilogy/executor.py +568 -0
  128. trilogy/hooks/__init__.py +4 -0
  129. trilogy/hooks/base_hook.py +40 -0
  130. trilogy/hooks/graph_hook.py +139 -0
  131. trilogy/hooks/query_debugger.py +166 -0
  132. trilogy/metadata/__init__.py +0 -0
  133. trilogy/parser.py +10 -0
  134. trilogy/parsing/README.md +21 -0
  135. trilogy/parsing/__init__.py +0 -0
  136. trilogy/parsing/common.py +1069 -0
  137. trilogy/parsing/config.py +5 -0
  138. trilogy/parsing/exceptions.py +8 -0
  139. trilogy/parsing/helpers.py +1 -0
  140. trilogy/parsing/parse_engine.py +2813 -0
  141. trilogy/parsing/render.py +750 -0
  142. trilogy/parsing/trilogy.lark +540 -0
  143. trilogy/py.typed +0 -0
  144. trilogy/render.py +42 -0
  145. trilogy/scripts/README.md +7 -0
  146. trilogy/scripts/__init__.py +0 -0
  147. trilogy/scripts/dependency/Cargo.lock +617 -0
  148. trilogy/scripts/dependency/Cargo.toml +39 -0
  149. trilogy/scripts/dependency/README.md +131 -0
  150. trilogy/scripts/dependency/build.sh +25 -0
  151. trilogy/scripts/dependency/src/directory_resolver.rs +162 -0
  152. trilogy/scripts/dependency/src/lib.rs +16 -0
  153. trilogy/scripts/dependency/src/main.rs +770 -0
  154. trilogy/scripts/dependency/src/parser.rs +435 -0
  155. trilogy/scripts/dependency/src/preql.pest +208 -0
  156. trilogy/scripts/dependency/src/python_bindings.rs +289 -0
  157. trilogy/scripts/dependency/src/resolver.rs +716 -0
  158. trilogy/scripts/dependency/tests/base.preql +3 -0
  159. trilogy/scripts/dependency/tests/cli_integration.rs +377 -0
  160. trilogy/scripts/dependency/tests/customer.preql +6 -0
  161. trilogy/scripts/dependency/tests/main.preql +9 -0
  162. trilogy/scripts/dependency/tests/orders.preql +7 -0
  163. trilogy/scripts/dependency/tests/test_data/base.preql +9 -0
  164. trilogy/scripts/dependency/tests/test_data/consumer.preql +1 -0
  165. trilogy/scripts/dependency.py +323 -0
  166. trilogy/scripts/display.py +460 -0
  167. trilogy/scripts/environment.py +46 -0
  168. trilogy/scripts/parallel_execution.py +483 -0
  169. trilogy/scripts/single_execution.py +131 -0
  170. trilogy/scripts/trilogy.py +772 -0
  171. trilogy/std/__init__.py +0 -0
  172. trilogy/std/color.preql +3 -0
  173. trilogy/std/date.preql +13 -0
  174. trilogy/std/display.preql +18 -0
  175. trilogy/std/geography.preql +22 -0
  176. trilogy/std/metric.preql +15 -0
  177. trilogy/std/money.preql +67 -0
  178. trilogy/std/net.preql +14 -0
  179. trilogy/std/ranking.preql +7 -0
  180. trilogy/std/report.preql +5 -0
  181. trilogy/std/semantic.preql +6 -0
  182. trilogy/utility.py +34 -0
@@ -0,0 +1,483 @@
1
+ import threading
2
+ from dataclasses import dataclass
3
+ from datetime import datetime
4
+ from pathlib import Path
5
+ from typing import Any, Callable, Protocol
6
+
7
+ import networkx as nx
8
+
9
+ from trilogy import Executor
10
+ from trilogy.scripts.dependency import (
11
+ DependencyResolver,
12
+ DependencyStrategy,
13
+ ScriptNode,
14
+ create_script_nodes,
15
+ )
16
+
17
+
18
@dataclass
class ExecutionResult:
    """Result of executing a single script.

    One instance is produced per script, including scripts that never ran
    because a dependency failed (those carry a RuntimeError and zero duration).
    """

    # The script this result belongs to.
    node: ScriptNode
    # True when the script executed without raising.
    success: bool
    # Captured exception on failure (or the skip marker); None on success.
    error: Exception | None = None
    duration: float = 0.0  # seconds
26
+
27
+
28
@dataclass
class ParallelExecutionSummary:
    """Summary of a parallel execution run."""

    # Number of scripts discovered in the dependency graph.
    total_scripts: int
    # Count of scripts that executed successfully.
    successful: int
    # Count of scripts that failed or were skipped (total_scripts - successful).
    failed: int
    # Wall-clock duration of the whole run, in seconds.
    total_duration: float
    # Per-script results, in the order they were recorded.
    results: list[ExecutionResult]

    @property
    def all_succeeded(self) -> bool:
        # Convenience flag: True when no script failed or was skipped.
        return self.failed == 0
41
+
42
+
43
class ExecutionStrategy(Protocol):
    """Structural protocol for graph-traversal execution strategies."""

    def execute(
        self,
        graph: nx.DiGraph,
        resolver: DependencyResolver,
        max_workers: int,
        executor_factory: Callable[[ScriptNode], Any],
        execution_fn: Callable[[Any, ScriptNode], None],
        on_script_start: Callable[[ScriptNode], None] | None = None,
        on_script_complete: Callable[[ExecutionResult], None] | None = None,
    ) -> list[ExecutionResult]:
        """
        Execute scripts according to the strategy.

        Args:
            graph: The dependency graph (edges point from deps to dependents).
            resolver: The resolver that built the graph, available to
                strategies that need to re-query dependency information.
            max_workers: Maximum parallel workers.
            executor_factory: Factory to create executor for each script.
            execution_fn: Function to execute a script.
            on_script_start: Optional callback invoked just before a script runs.
            on_script_complete: Optional callback invoked with each ExecutionResult.

        Returns:
            List of ExecutionResult for all scripts.
        """
        ...
69
+
70
+
71
# Type aliases for cleaner signatures
CompletedSet = set[ScriptNode]  # nodes with a recorded outcome (run or skipped)
FailedSet = set[ScriptNode]  # nodes that failed or were skipped
InProgressSet = set[ScriptNode]  # nodes currently executing on a worker
ResultsList = list[ExecutionResult]  # accumulated per-script results
RemainingDepsDict = dict[ScriptNode, int]  # node -> count of incomplete dependencies
ReadyList = list[ScriptNode]  # FIFO queue of nodes whose dependencies are satisfied
OnCompleteCallback = Callable[[ExecutionResult], None] | None  # optional completion hook
79
+
80
+
81
def _propagate_failure(
    failed_node: ScriptNode,
    graph: nx.DiGraph,
    completed: CompletedSet,
    in_progress: InProgressSet,
    results: ResultsList,
    failed: FailedSet,
    on_script_complete: OnCompleteCallback,
) -> None:
    """Recursively mark every not-yet-started dependent of ``failed_node`` as skipped.

    Dependents that are already completed or currently executing are left
    untouched; every other downstream node gets a failed ExecutionResult and
    the skip cascades to its own dependents.
    """
    for child in graph.successors(failed_node):
        # Nodes that already have an outcome, or are mid-flight, keep it.
        if child in completed or child in in_progress:
            continue
        skipped = ExecutionResult(
            node=child,
            success=False,
            error=RuntimeError("Skipped due to failed dependency"),
            duration=0.0,
        )
        results.append(skipped)
        completed.add(child)
        failed.add(child)
        if on_script_complete:
            on_script_complete(skipped)
        # Everything downstream of a skipped node is skipped too.
        _propagate_failure(
            child,
            graph,
            completed,
            in_progress,
            results,
            failed,
            on_script_complete,
        )
115
+
116
+
117
def _get_next_ready(ready: ReadyList) -> ScriptNode | None:
    """Pop and return the oldest ready node, or None when the queue is empty."""
    if not ready:
        return None
    return ready.pop(0)
122
+
123
+
124
def _mark_node_complete(
    node: ScriptNode,
    success: bool,
    graph: nx.DiGraph,
    completed: CompletedSet,
    failed: FailedSet,
    in_progress: InProgressSet,
    remaining_deps: RemainingDepsDict,
    ready: ReadyList,
    results: ResultsList,
    on_script_complete: OnCompleteCallback,
) -> None:
    """Record ``node``'s outcome and promote or skip its dependents.

    On success each dependent's outstanding-dependency count is decremented;
    a dependent whose count reaches zero is queued as ready unless one of its
    dependencies failed earlier, in which case it (and its whole subtree) is
    recorded as skipped. On failure, every unstarted dependent is skipped.
    """
    in_progress.discard(node)
    completed.add(node)
    if not success:
        failed.add(node)

    # Walk direct dependents and decide their fate.
    for dependent in graph.successors(node):
        # Dependents with a recorded outcome, or currently running, are left alone.
        if dependent in completed or dependent in in_progress:
            continue

        if success:
            remaining_deps[dependent] -= 1
            if remaining_deps[dependent] != 0:
                continue  # still waiting on other dependencies
            if not (set(graph.predecessors(dependent)) & failed):
                # All dependencies satisfied and none failed: ready to run.
                ready.append(dependent)
                continue
            # A dependency failed before this one completed: fall through to skip.
        elif dependent in failed:
            continue  # already recorded as skipped

        skip_result = ExecutionResult(
            node=dependent,
            success=False,
            error=RuntimeError("Skipped due to failed dependency"),
            duration=0.0,
        )
        results.append(skip_result)
        completed.add(dependent)
        failed.add(dependent)
        if on_script_complete:
            on_script_complete(skip_result)
        # Cascade the skip through everything downstream.
        _propagate_failure(
            dependent,
            graph,
            completed,
            in_progress,
            results,
            failed,
            on_script_complete,
        )
203
+
204
+
205
def _is_execution_done(completed: CompletedSet, total_count: int) -> bool:
    """Return True once every node in the graph has a recorded outcome."""
    return total_count <= len(completed)
208
+
209
+
210
def _execute_single(
    node: ScriptNode,
    executor_factory: Callable[[ScriptNode], Executor],
    execution_fn: Callable[[Any, ScriptNode], None],
) -> ExecutionResult:
    """Execute a single script and return the result.

    Creates an executor via ``executor_factory``, runs ``execution_fn`` with
    it, and always closes the executor afterwards. Any exception raised by
    the factory or the execution function is captured in the returned
    ExecutionResult rather than propagated.

    BUG FIX: previously ``executor.close()`` was called inline in both the
    success and failure paths; if close() itself raised inside the ``except``
    branch, the exception escaped this function. The worker threads call this
    outside any handler, so a failing close() would kill the thread before
    the node was marked complete and the run would hang forever on join().
    Cleanup is now a best-effort ``finally``.
    """
    start_time = datetime.now()
    executor = None
    try:
        executor = executor_factory(node)
        execution_fn(executor, node)
        return ExecutionResult(
            node=node,
            success=True,
            error=None,
            duration=(datetime.now() - start_time).total_seconds(),
        )
    except Exception as e:
        return ExecutionResult(
            node=node,
            success=False,
            error=e,
            duration=(datetime.now() - start_time).total_seconds(),
        )
    finally:
        if executor:
            try:
                executor.close()
            except Exception:
                # Best-effort cleanup: a failing close() must never escape,
                # or the calling worker thread dies and completion tracking
                # deadlocks. The script's own outcome is already recorded.
                pass
241
+
242
+
243
def _create_worker(
    graph: nx.DiGraph,
    lock: threading.Lock,
    work_available: threading.Condition,
    completed: CompletedSet,
    failed: FailedSet,
    in_progress: InProgressSet,
    remaining_deps: RemainingDepsDict,
    ready: ReadyList,
    results: ResultsList,
    total_count: int,
    executor_factory: Callable[[ScriptNode], Any],
    execution_fn: Callable[[Any, ScriptNode], None],
    on_script_start: Callable[[ScriptNode], None] | None,
    on_script_complete: OnCompleteCallback,
) -> Callable[[], None]:
    """
    Create a worker function for thread execution to process the dependency graph.

    The returned closure loops until all ``total_count`` nodes have outcomes:
    it waits on ``work_available`` for a ready node, claims it, executes it
    outside the lock, then re-acquires the lock to record the result and wake
    its peers.

    NOTE(review): this assumes ``work_available`` was constructed over
    ``lock`` (as EagerBFSStrategy does via ``threading.Condition(lock)``), so
    ``with work_available`` and ``with lock`` guard the same shared state.
    """

    def worker() -> None:
        while True:
            node = None

            with work_available:
                # Wait for work or global completion
                while not ready and not _is_execution_done(completed, total_count):
                    work_available.wait()

                if _is_execution_done(completed, total_count):
                    # Every node has an outcome; this worker can exit.
                    return

                node = _get_next_ready(ready)
                if node is None:
                    # Should be impossible if total_count check is correct, but handles race condition safety
                    continue

                # Claim the node before releasing the lock so no peer re-claims it.
                in_progress.add(node)

            # Execute outside the lock (so other workers run in parallel)
            if node is not None:
                if on_script_start:
                    on_script_start(node)
                result = _execute_single(node, executor_factory, execution_fn)

                # Use the lock for state updates and notification
                with lock:
                    results.append(result)

                    # Completion callback is serialized across workers (runs under lock).
                    if on_script_complete:
                        on_script_complete(result)

                    # Promote newly-ready dependents, or cascade skips on failure.
                    _mark_node_complete(
                        node,
                        result.success,
                        graph,
                        completed,
                        failed,
                        in_progress,
                        remaining_deps,
                        ready,
                        results,
                        on_script_complete,
                    )
                    work_available.notify_all()  # Notify other workers of new ready/completed state

    return worker
310
+
311
+
312
class EagerBFSStrategy:
    """
    Eager Breadth-First Search (BFS) execution strategy.

    Scripts execute as soon as all their dependencies complete, maximizing parallelism.
    Uses a thread pool coordinated by locks and condition variables.
    """

    def execute(
        self,
        graph: nx.DiGraph,
        resolver: DependencyResolver,
        max_workers: int,
        executor_factory: Callable[[ScriptNode], Any],
        execution_fn: Callable[[Any, ScriptNode], None],
        on_script_start: Callable[[ScriptNode], None] | None = None,
        on_script_complete: Callable[[ExecutionResult], None] | None = None,
    ) -> list[ExecutionResult]:
        """Run every script in the graph, launching each as soon as its dependencies finish."""
        all_nodes = list(graph.nodes())
        if not all_nodes:
            return []

        state_lock = threading.Lock()
        # Condition built over the same lock: workers wait on it for new work.
        work_signal = threading.Condition(state_lock)

        # Shared mutable state, guarded by state_lock / work_signal.
        completed: CompletedSet = set()
        failed: FailedSet = set()
        in_progress: InProgressSet = set()
        results: ResultsList = []

        # Outstanding-dependency count per node (edges point dep -> dependent).
        remaining_deps: RemainingDepsDict = {
            n: graph.in_degree(n) for n in all_nodes
        }
        # Seed the queue with nodes that have no dependencies at all.
        ready: ReadyList = [n for n in all_nodes if remaining_deps[n] == 0]
        total_count = len(all_nodes)

        worker = _create_worker(
            graph=graph,
            lock=state_lock,
            work_available=work_signal,
            completed=completed,
            failed=failed,
            in_progress=in_progress,
            remaining_deps=remaining_deps,
            ready=ready,
            results=results,
            total_count=total_count,
            executor_factory=executor_factory,
            execution_fn=execution_fn,
            on_script_start=on_script_start,
            on_script_complete=on_script_complete,
        )

        # Never spawn more threads than there are scripts to run.
        threads: list[threading.Thread] = [
            threading.Thread(target=worker, daemon=True)
            for _ in range(min(max_workers, total_count))
        ]
        for thread in threads:
            thread.start()

        # Wake any worker that went to sleep before seeing the initial queue.
        with work_signal:
            work_signal.notify_all()

        # Block until every worker has drained the graph and exited.
        for thread in threads:
            thread.join()

        return results
388
+
389
+
390
class ParallelExecutor:
    """
    Executes scripts in parallel while respecting dependencies.

    Uses an Eager BFS traversal by default, running scripts as soon as their
    dependencies complete.
    """

    def __init__(
        self,
        max_workers: int = 5,
        dependency_strategy: DependencyStrategy | None = None,
        execution_strategy: ExecutionStrategy | None = None,
    ):
        """
        Initialize the parallel executor.

        Args:
            max_workers: Maximum number of parallel workers.
            dependency_strategy: Strategy for resolving dependencies.
            execution_strategy: Strategy for traversing the graph during execution.
        """
        self.max_workers = max_workers
        # The resolver discovers dependencies and builds the graph.
        self.resolver = DependencyResolver(strategy=dependency_strategy)
        # The execution strategy decides how the graph is traversed and run.
        self.execution_strategy = execution_strategy or EagerBFSStrategy()

    def execute(
        self,
        root: Path,
        executor_factory: Callable[[ScriptNode], Any],
        execution_fn: Callable[[Any, ScriptNode], None],
        on_script_start: Callable[[ScriptNode], None] | None = None,
        on_script_complete: Callable[[ExecutionResult], None] | None = None,
    ) -> ParallelExecutionSummary:
        """
        Execute scripts in parallel respecting dependencies.

        Args:
            root: Root path (folder or single file) to find scripts.
            executor_factory: Factory function to create an executor for a script.
            execution_fn: Function that executes a script given (executor, node).
            on_script_start: Optional callback invoked before each script runs.
            on_script_complete: Optional callback invoked per ExecutionResult.

        Returns:
            ParallelExecutionSummary with all results.
        """
        run_started = datetime.now()

        # Build the dependency graph from a folder or from a single file.
        if root.is_dir():
            graph = self.resolver.build_folder_graph(root)
            script_nodes = list(graph.nodes())
        else:
            script_nodes = create_script_nodes([root])
            graph = self.resolver.build_graph(script_nodes)

        # Total node count feeds the summary (and failure = total - successes).
        total_scripts = len(script_nodes)

        # Delegate traversal/execution to the configured strategy.
        results = self.execution_strategy.execute(
            graph=graph,
            resolver=self.resolver,
            max_workers=self.max_workers,
            executor_factory=executor_factory,
            execution_fn=execution_fn,
            on_script_start=on_script_start,
            on_script_complete=on_script_complete,
        )

        elapsed = (datetime.now() - run_started).total_seconds()
        ok_count = sum(1 for outcome in results if outcome.success)

        return ParallelExecutionSummary(
            total_scripts=total_scripts,
            successful=ok_count,
            failed=total_scripts - ok_count,
            total_duration=elapsed,
            results=results,
        )

    def get_folder_execution_plan(self, folder: Path) -> nx.DiGraph:
        """
        Get the execution plan (dependency graph) for all scripts in a folder.
        """
        return self.resolver.build_folder_graph(folder)

    def get_execution_plan(self, files: list[Path]) -> nx.DiGraph:
        """
        Get the execution plan (dependency graph) for the given files without executing.
        """
        return self.resolver.build_graph(create_script_nodes(files))
@@ -0,0 +1,131 @@
1
+ import traceback
2
+ from datetime import datetime
3
+ from typing import Any, Union
4
+
5
+ from trilogy import Executor
6
+ from trilogy.core.statements.execute import PROCESSED_STATEMENT_TYPES
7
+ from trilogy.scripts.display import (
8
+ FETCH_LIMIT,
9
+ ResultSet,
10
+ create_progress_context,
11
+ print_error,
12
+ print_info,
13
+ print_results_table,
14
+ show_statement_result,
15
+ show_statement_type,
16
+ )
17
+
18
+
19
def get_statement_type(statement: PROCESSED_STATEMENT_TYPES) -> str:
    """Return the class name of ``statement``, used as its display type."""
    statement_class = type(statement)
    return statement_class.__name__
22
+
23
+
24
def execute_single_statement(
    exec: Executor,
    query: PROCESSED_STATEMENT_TYPES,
    idx: int,
    total_queries: int,
    use_progress=False,
) -> tuple[bool, ResultSet | None, Any, Union[Exception, None]]:
    """Execute a single statement and handle results/errors consistently.

    Returns a ``(success, results, duration, error)`` tuple. ``results`` is a
    ResultSet capped at FETCH_LIMIT + 1 rows (None for row-less statements)
    and ``duration`` is a timedelta. When ``use_progress`` is set, the
    per-statement console output is suppressed so a progress bar can render
    it after the fact.
    """
    if not use_progress:
        # Announce the statement class up front when no progress bar owns the console.
        show_statement_type(idx, total_queries, get_statement_type(query))

    started = datetime.now()

    try:
        cursor = exec.execute_statement(query)
        if cursor:
            fetched = ResultSet(
                rows=cursor.fetchmany(FETCH_LIMIT + 1), columns=cursor.keys()
            )
        else:
            fetched = None
        elapsed = datetime.now() - started

        if not use_progress:
            show_statement_result(idx, total_queries, elapsed, bool(fetched))

        return True, fetched, elapsed, None

    except Exception as err:
        elapsed = datetime.now() - started

        if not use_progress:
            show_statement_result(idx, total_queries, elapsed, False, str(err), type(err))

        return False, None, elapsed, err
62
+
63
+
64
def execute_queries_with_progress(
    exec: Executor, queries: list[PROCESSED_STATEMENT_TYPES]
) -> Exception | None:
    """Execute queries with a Rich progress bar.

    Returns None when every statement succeeded, otherwise the first
    exception encountered (execution stops at the first failure). Status
    lines and result tables are printed only after the progress bar finishes
    so they do not interleave with it.
    """
    progress = create_progress_context()
    results_to_print = []
    exception = None

    with progress:
        task = progress.add_task("Executing statements...", total=len(queries))

        for idx, query in enumerate(queries):
            statement_type = get_statement_type(query)
            progress.update(
                task, description=f"Statement {idx+1}/{len(queries)} ({statement_type})"
            )

            success, results, duration, error = execute_single_statement(
                exec, query, idx, len(queries), use_progress=True
            )

            if not success:
                exception = error

            # Store results for printing after progress is done
            results_to_print.append(
                (idx, len(queries), duration, success, results, error)
            )
            progress.advance(task)
            if exception:
                # Stop at the first failing statement.
                break

    # Print all results after progress bar is finished
    for idx, total_queries, duration, success, results, error in results_to_print:
        if error:
            show_statement_result(
                idx, total_queries, duration, False, str(error), type(error)
            )
            # BUG FIX: traceback.format_exc() only works inside an active
            # exception handler. Here the exception was caught (and returned)
            # inside execute_single_statement, so format_exc() produced
            # "NoneType: None". Format the stored exception object instead.
            tb_text = "".join(
                traceback.format_exception(type(error), error, error.__traceback__)
            )
            print_error(f"Full traceback:\n{tb_text}")
        else:
            show_statement_result(idx, total_queries, duration, bool(results))
        if results and not error:
            print_results_table(results)

    return exception
109
+
110
+
111
def execute_queries_simple(
    exec: Executor, queries: list[PROCESSED_STATEMENT_TYPES]
) -> Exception | None:
    """Execute queries sequentially with plain console output.

    Returns None when every statement succeeds, otherwise the exception from
    the most recent failing statement. Unlike the progress-bar variant,
    execution continues past failures.
    """
    last_error: Exception | None = None

    for idx, statement in enumerate(queries):
        # Only number the statements when there is more than one.
        if len(queries) > 1:
            print_info(f"Executing statement {idx+1} of {len(queries)}...")

        success, result_set, _duration, error = execute_single_statement(
            exec, statement, idx, len(queries), use_progress=False
        )

        if not success:
            last_error = error

        if result_set and not error:
            print_results_table(result_set)

    return last_error
+ return exception