dvt-core 1.11.0b4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dvt-core might be problematic.
Files changed (261)
  1. dvt/__init__.py +7 -0
  2. dvt/_pydantic_shim.py +26 -0
  3. dvt/adapters/__init__.py +16 -0
  4. dvt/adapters/multi_adapter_manager.py +268 -0
  5. dvt/artifacts/__init__.py +0 -0
  6. dvt/artifacts/exceptions/__init__.py +1 -0
  7. dvt/artifacts/exceptions/schemas.py +31 -0
  8. dvt/artifacts/resources/__init__.py +116 -0
  9. dvt/artifacts/resources/base.py +68 -0
  10. dvt/artifacts/resources/types.py +93 -0
  11. dvt/artifacts/resources/v1/analysis.py +10 -0
  12. dvt/artifacts/resources/v1/catalog.py +23 -0
  13. dvt/artifacts/resources/v1/components.py +275 -0
  14. dvt/artifacts/resources/v1/config.py +282 -0
  15. dvt/artifacts/resources/v1/documentation.py +11 -0
  16. dvt/artifacts/resources/v1/exposure.py +52 -0
  17. dvt/artifacts/resources/v1/function.py +53 -0
  18. dvt/artifacts/resources/v1/generic_test.py +32 -0
  19. dvt/artifacts/resources/v1/group.py +22 -0
  20. dvt/artifacts/resources/v1/hook.py +11 -0
  21. dvt/artifacts/resources/v1/macro.py +30 -0
  22. dvt/artifacts/resources/v1/metric.py +173 -0
  23. dvt/artifacts/resources/v1/model.py +146 -0
  24. dvt/artifacts/resources/v1/owner.py +10 -0
  25. dvt/artifacts/resources/v1/saved_query.py +112 -0
  26. dvt/artifacts/resources/v1/seed.py +42 -0
  27. dvt/artifacts/resources/v1/semantic_layer_components.py +72 -0
  28. dvt/artifacts/resources/v1/semantic_model.py +315 -0
  29. dvt/artifacts/resources/v1/singular_test.py +14 -0
  30. dvt/artifacts/resources/v1/snapshot.py +92 -0
  31. dvt/artifacts/resources/v1/source_definition.py +85 -0
  32. dvt/artifacts/resources/v1/sql_operation.py +10 -0
  33. dvt/artifacts/resources/v1/unit_test_definition.py +78 -0
  34. dvt/artifacts/schemas/__init__.py +0 -0
  35. dvt/artifacts/schemas/base.py +191 -0
  36. dvt/artifacts/schemas/batch_results.py +24 -0
  37. dvt/artifacts/schemas/catalog/__init__.py +12 -0
  38. dvt/artifacts/schemas/catalog/v1/__init__.py +0 -0
  39. dvt/artifacts/schemas/catalog/v1/catalog.py +60 -0
  40. dvt/artifacts/schemas/freshness/__init__.py +1 -0
  41. dvt/artifacts/schemas/freshness/v3/__init__.py +0 -0
  42. dvt/artifacts/schemas/freshness/v3/freshness.py +159 -0
  43. dvt/artifacts/schemas/manifest/__init__.py +2 -0
  44. dvt/artifacts/schemas/manifest/v12/__init__.py +0 -0
  45. dvt/artifacts/schemas/manifest/v12/manifest.py +212 -0
  46. dvt/artifacts/schemas/results.py +148 -0
  47. dvt/artifacts/schemas/run/__init__.py +2 -0
  48. dvt/artifacts/schemas/run/v5/__init__.py +0 -0
  49. dvt/artifacts/schemas/run/v5/run.py +184 -0
  50. dvt/artifacts/schemas/upgrades/__init__.py +4 -0
  51. dvt/artifacts/schemas/upgrades/upgrade_manifest.py +174 -0
  52. dvt/artifacts/schemas/upgrades/upgrade_manifest_dbt_version.py +2 -0
  53. dvt/artifacts/utils/validation.py +153 -0
  54. dvt/cli/__init__.py +1 -0
  55. dvt/cli/context.py +16 -0
  56. dvt/cli/exceptions.py +56 -0
  57. dvt/cli/flags.py +558 -0
  58. dvt/cli/main.py +971 -0
  59. dvt/cli/option_types.py +121 -0
  60. dvt/cli/options.py +79 -0
  61. dvt/cli/params.py +803 -0
  62. dvt/cli/requires.py +478 -0
  63. dvt/cli/resolvers.py +32 -0
  64. dvt/cli/types.py +40 -0
  65. dvt/clients/__init__.py +0 -0
  66. dvt/clients/checked_load.py +82 -0
  67. dvt/clients/git.py +164 -0
  68. dvt/clients/jinja.py +206 -0
  69. dvt/clients/jinja_static.py +245 -0
  70. dvt/clients/registry.py +192 -0
  71. dvt/clients/yaml_helper.py +68 -0
  72. dvt/compilation.py +833 -0
  73. dvt/compute/__init__.py +26 -0
  74. dvt/compute/base.py +288 -0
  75. dvt/compute/engines/__init__.py +13 -0
  76. dvt/compute/engines/duckdb_engine.py +368 -0
  77. dvt/compute/engines/spark_engine.py +273 -0
  78. dvt/compute/query_analyzer.py +212 -0
  79. dvt/compute/router.py +483 -0
  80. dvt/config/__init__.py +4 -0
  81. dvt/config/catalogs.py +95 -0
  82. dvt/config/compute_config.py +406 -0
  83. dvt/config/profile.py +411 -0
  84. dvt/config/profiles_v2.py +464 -0
  85. dvt/config/project.py +893 -0
  86. dvt/config/renderer.py +232 -0
  87. dvt/config/runtime.py +491 -0
  88. dvt/config/selectors.py +209 -0
  89. dvt/config/utils.py +78 -0
  90. dvt/connectors/.gitignore +6 -0
  91. dvt/connectors/README.md +306 -0
  92. dvt/connectors/catalog.yml +217 -0
  93. dvt/connectors/download_connectors.py +300 -0
  94. dvt/constants.py +29 -0
  95. dvt/context/__init__.py +0 -0
  96. dvt/context/base.py +746 -0
  97. dvt/context/configured.py +136 -0
  98. dvt/context/context_config.py +350 -0
  99. dvt/context/docs.py +82 -0
  100. dvt/context/exceptions_jinja.py +179 -0
  101. dvt/context/macro_resolver.py +195 -0
  102. dvt/context/macros.py +171 -0
  103. dvt/context/manifest.py +73 -0
  104. dvt/context/providers.py +2198 -0
  105. dvt/context/query_header.py +14 -0
  106. dvt/context/secret.py +59 -0
  107. dvt/context/target.py +74 -0
  108. dvt/contracts/__init__.py +0 -0
  109. dvt/contracts/files.py +413 -0
  110. dvt/contracts/graph/__init__.py +0 -0
  111. dvt/contracts/graph/manifest.py +1904 -0
  112. dvt/contracts/graph/metrics.py +98 -0
  113. dvt/contracts/graph/model_config.py +71 -0
  114. dvt/contracts/graph/node_args.py +42 -0
  115. dvt/contracts/graph/nodes.py +1806 -0
  116. dvt/contracts/graph/semantic_manifest.py +233 -0
  117. dvt/contracts/graph/unparsed.py +812 -0
  118. dvt/contracts/project.py +417 -0
  119. dvt/contracts/results.py +53 -0
  120. dvt/contracts/selection.py +23 -0
  121. dvt/contracts/sql.py +86 -0
  122. dvt/contracts/state.py +69 -0
  123. dvt/contracts/util.py +46 -0
  124. dvt/deprecations.py +347 -0
  125. dvt/deps/__init__.py +0 -0
  126. dvt/deps/base.py +153 -0
  127. dvt/deps/git.py +196 -0
  128. dvt/deps/local.py +80 -0
  129. dvt/deps/registry.py +131 -0
  130. dvt/deps/resolver.py +149 -0
  131. dvt/deps/tarball.py +121 -0
  132. dvt/docs/source/_ext/dbt_click.py +118 -0
  133. dvt/docs/source/conf.py +32 -0
  134. dvt/env_vars.py +64 -0
  135. dvt/event_time/event_time.py +40 -0
  136. dvt/event_time/sample_window.py +60 -0
  137. dvt/events/__init__.py +16 -0
  138. dvt/events/base_types.py +37 -0
  139. dvt/events/core_types_pb2.py +2 -0
  140. dvt/events/logging.py +109 -0
  141. dvt/events/types.py +2534 -0
  142. dvt/exceptions.py +1487 -0
  143. dvt/flags.py +89 -0
  144. dvt/graph/__init__.py +11 -0
  145. dvt/graph/cli.py +248 -0
  146. dvt/graph/graph.py +172 -0
  147. dvt/graph/queue.py +213 -0
  148. dvt/graph/selector.py +375 -0
  149. dvt/graph/selector_methods.py +976 -0
  150. dvt/graph/selector_spec.py +223 -0
  151. dvt/graph/thread_pool.py +18 -0
  152. dvt/hooks.py +21 -0
  153. dvt/include/README.md +49 -0
  154. dvt/include/__init__.py +3 -0
  155. dvt/include/global_project.py +4 -0
  156. dvt/include/starter_project/.gitignore +4 -0
  157. dvt/include/starter_project/README.md +15 -0
  158. dvt/include/starter_project/__init__.py +3 -0
  159. dvt/include/starter_project/analyses/.gitkeep +0 -0
  160. dvt/include/starter_project/dvt_project.yml +36 -0
  161. dvt/include/starter_project/macros/.gitkeep +0 -0
  162. dvt/include/starter_project/models/example/my_first_dbt_model.sql +27 -0
  163. dvt/include/starter_project/models/example/my_second_dbt_model.sql +6 -0
  164. dvt/include/starter_project/models/example/schema.yml +21 -0
  165. dvt/include/starter_project/seeds/.gitkeep +0 -0
  166. dvt/include/starter_project/snapshots/.gitkeep +0 -0
  167. dvt/include/starter_project/tests/.gitkeep +0 -0
  168. dvt/internal_deprecations.py +27 -0
  169. dvt/jsonschemas/__init__.py +3 -0
  170. dvt/jsonschemas/jsonschemas.py +309 -0
  171. dvt/jsonschemas/project/0.0.110.json +4717 -0
  172. dvt/jsonschemas/project/0.0.85.json +2015 -0
  173. dvt/jsonschemas/resources/0.0.110.json +2636 -0
  174. dvt/jsonschemas/resources/0.0.85.json +2536 -0
  175. dvt/jsonschemas/resources/latest.json +6773 -0
  176. dvt/links.py +4 -0
  177. dvt/materializations/__init__.py +0 -0
  178. dvt/materializations/incremental/__init__.py +0 -0
  179. dvt/materializations/incremental/microbatch.py +235 -0
  180. dvt/mp_context.py +8 -0
  181. dvt/node_types.py +37 -0
  182. dvt/parser/__init__.py +23 -0
  183. dvt/parser/analysis.py +21 -0
  184. dvt/parser/base.py +549 -0
  185. dvt/parser/common.py +267 -0
  186. dvt/parser/docs.py +52 -0
  187. dvt/parser/fixtures.py +51 -0
  188. dvt/parser/functions.py +30 -0
  189. dvt/parser/generic_test.py +100 -0
  190. dvt/parser/generic_test_builders.py +334 -0
  191. dvt/parser/hooks.py +119 -0
  192. dvt/parser/macros.py +137 -0
  193. dvt/parser/manifest.py +2204 -0
  194. dvt/parser/models.py +574 -0
  195. dvt/parser/partial.py +1179 -0
  196. dvt/parser/read_files.py +445 -0
  197. dvt/parser/schema_generic_tests.py +423 -0
  198. dvt/parser/schema_renderer.py +111 -0
  199. dvt/parser/schema_yaml_readers.py +936 -0
  200. dvt/parser/schemas.py +1467 -0
  201. dvt/parser/search.py +149 -0
  202. dvt/parser/seeds.py +28 -0
  203. dvt/parser/singular_test.py +20 -0
  204. dvt/parser/snapshots.py +44 -0
  205. dvt/parser/sources.py +557 -0
  206. dvt/parser/sql.py +63 -0
  207. dvt/parser/unit_tests.py +622 -0
  208. dvt/plugins/__init__.py +20 -0
  209. dvt/plugins/contracts.py +10 -0
  210. dvt/plugins/exceptions.py +2 -0
  211. dvt/plugins/manager.py +164 -0
  212. dvt/plugins/manifest.py +21 -0
  213. dvt/profiler.py +20 -0
  214. dvt/py.typed +1 -0
  215. dvt/runners/__init__.py +2 -0
  216. dvt/runners/exposure_runner.py +7 -0
  217. dvt/runners/no_op_runner.py +46 -0
  218. dvt/runners/saved_query_runner.py +7 -0
  219. dvt/selected_resources.py +8 -0
  220. dvt/task/__init__.py +0 -0
  221. dvt/task/base.py +504 -0
  222. dvt/task/build.py +197 -0
  223. dvt/task/clean.py +57 -0
  224. dvt/task/clone.py +162 -0
  225. dvt/task/compile.py +151 -0
  226. dvt/task/compute.py +366 -0
  227. dvt/task/debug.py +650 -0
  228. dvt/task/deps.py +280 -0
  229. dvt/task/docs/__init__.py +3 -0
  230. dvt/task/docs/generate.py +408 -0
  231. dvt/task/docs/index.html +250 -0
  232. dvt/task/docs/serve.py +28 -0
  233. dvt/task/freshness.py +323 -0
  234. dvt/task/function.py +122 -0
  235. dvt/task/group_lookup.py +46 -0
  236. dvt/task/init.py +374 -0
  237. dvt/task/list.py +237 -0
  238. dvt/task/printer.py +176 -0
  239. dvt/task/profiles.py +256 -0
  240. dvt/task/retry.py +175 -0
  241. dvt/task/run.py +1146 -0
  242. dvt/task/run_operation.py +142 -0
  243. dvt/task/runnable.py +802 -0
  244. dvt/task/seed.py +104 -0
  245. dvt/task/show.py +150 -0
  246. dvt/task/snapshot.py +57 -0
  247. dvt/task/sql.py +111 -0
  248. dvt/task/test.py +464 -0
  249. dvt/tests/fixtures/__init__.py +1 -0
  250. dvt/tests/fixtures/project.py +620 -0
  251. dvt/tests/util.py +651 -0
  252. dvt/tracking.py +529 -0
  253. dvt/utils/__init__.py +3 -0
  254. dvt/utils/artifact_upload.py +151 -0
  255. dvt/utils/utils.py +408 -0
  256. dvt/version.py +249 -0
  257. dvt_core-1.11.0b4.dist-info/METADATA +252 -0
  258. dvt_core-1.11.0b4.dist-info/RECORD +261 -0
  259. dvt_core-1.11.0b4.dist-info/WHEEL +5 -0
  260. dvt_core-1.11.0b4.dist-info/entry_points.txt +2 -0
  261. dvt_core-1.11.0b4.dist-info/top_level.txt +1 -0
dvt/task/compute.py ADDED
@@ -0,0 +1,366 @@
+ """
+ DVT Compute Tasks
+
+ This module implements compute layer management commands:
+ - show: Show compute configuration
+ - engines: List available compute engines
+ - test: Test compute engines
+ """
+
+ import os
+ from pathlib import Path
+ from typing import Any, Dict, List, Optional
+
+ from dvt.cli.flags import Flags
+ from dvt.config.compute_config import ComputeConfig, load_compute_config
+ from dvt.events.types import Note
+ from dvt.task.base import BaseTask
+
+ from dbt_common.events.functions import fire_event
+ from dbt_common.ui import green, red, yellow
+
+
+ class ComputeShowTask(BaseTask):
+     """Task to show compute layer configuration."""
+
+     def __init__(self, args: Flags) -> None:
+         super().__init__(args)
+         self.project_dir = args.PROJECT_DIR
+
+     def run(self) -> bool:
+         """Show compute layer configuration."""
+         fire_event(Note(msg=""))
+         fire_event(Note(msg="=" * 60))
+         fire_event(Note(msg="DVT Compute Configuration"))
+         fire_event(Note(msg="=" * 60))
+         fire_event(Note(msg=f"Project directory: {self.project_dir}"))
+         fire_event(Note(msg=""))
+
+         # Load compute config
+         try:
+             compute_config = load_compute_config(Path(self.project_dir))
+         except Exception as e:
+             fire_event(Note(msg=red(f"Error loading compute config: {e}")))
+             fire_event(Note(msg=""))
+             fire_event(Note(msg="Using default configuration"))
+             compute_config = ComputeConfig()
+
+         # Show configuration
+         fire_event(Note(msg="General Settings:"))
+         fire_event(Note(msg=f" Default engine: {green(compute_config.default_engine)}"))
+         fire_event(Note(msg=f" Data threshold: {compute_config.data_threshold_mb} MB"))
+         fire_event(Note(msg=""))
+
+         # Show DuckDB config
+         fire_event(Note(msg="DuckDB Configuration:"))
+         fire_event(Note(msg=f" Enabled: {green('Yes') if compute_config.duckdb.enabled else red('No')}"))
+         fire_event(Note(msg=f" Memory limit: {compute_config.duckdb.memory_limit}"))
+         fire_event(Note(msg=f" Threads: {compute_config.duckdb.threads}"))
+         if compute_config.duckdb.extensions:
+             fire_event(Note(msg=f" Extensions: {', '.join(compute_config.duckdb.extensions)}"))
+         fire_event(Note(msg=""))
+
+         # Show Spark config
+         fire_event(Note(msg="Spark Configuration:"))
+         fire_event(Note(msg=f" Local mode enabled: {green('Yes') if compute_config.spark_local.enabled else red('No')}"))
+         fire_event(Note(msg=f" Cluster mode enabled: {green('Yes') if compute_config.spark_cluster.enabled else red('No')}"))
+         fire_event(Note(msg=""))
+
+         if compute_config.spark_local.enabled:
+             fire_event(Note(msg=" Spark Local Settings:"))
+             fire_event(Note(msg=f" Master: {compute_config.spark_local.master}"))
+             fire_event(Note(msg=f" Driver memory: {compute_config.spark_local.driver_memory}"))
+             fire_event(Note(msg=f" Executor memory: {compute_config.spark_local.executor_memory}"))
+             fire_event(Note(msg=""))
+
+         if compute_config.spark_cluster.enabled:
+             fire_event(Note(msg=" Spark Cluster Settings:"))
+             fire_event(Note(msg=f" Master: {compute_config.spark_cluster.master or yellow('Not configured')}"))
+             fire_event(Note(msg=f" Deploy mode: {compute_config.spark_cluster.deploy_mode}"))
+             fire_event(Note(msg=""))
+
+         # Show auto-select config
+         fire_event(Note(msg="Auto-Select Configuration:"))
+         fire_event(Note(msg=f" Enabled: {green('Yes') if compute_config.auto_select.enabled else red('No')}"))
+         if compute_config.auto_select.enabled and compute_config.auto_select.rules:
+             fire_event(Note(msg=f" Rules: {len(compute_config.auto_select.rules)} configured"))
+             fire_event(Note(msg=""))
+             for i, rule in enumerate(compute_config.auto_select.rules, 1):
+                 fire_event(Note(msg=f" Rule {i}: {rule.name}"))
+                 fire_event(Note(msg=f" Priority: {rule.priority}"))
+                 fire_event(Note(msg=f" Action: {rule.action}"))
+                 fire_event(Note(msg=f" Description: {rule.description}"))
+                 fire_event(Note(msg=""))
+         else:
+             fire_event(Note(msg=" No auto-select rules configured"))
+             fire_event(Note(msg=""))
+
+         fire_event(Note(msg=f"Use 'dvt compute engines' to see available engines"))
+         fire_event(Note(msg=f"Use 'dvt compute test' to test compute engines"))
+         fire_event(Note(msg=""))
+
+         return True
+
+     def interpret_results(self, results) -> bool:
+         return results
+
+
+ class ComputeEnginesTask(BaseTask):
+     """Task to list available compute engines."""
+
+     def __init__(self, args: Flags) -> None:
+         super().__init__(args)
+         self.project_dir = args.PROJECT_DIR
+
+     def run(self) -> bool:
+         """List available compute engines."""
+         fire_event(Note(msg=""))
+         fire_event(Note(msg="=" * 60))
+         fire_event(Note(msg="Available Compute Engines"))
+         fire_event(Note(msg="=" * 60))
+         fire_event(Note(msg=""))
+
+         # Load compute config
+         try:
+             compute_config = load_compute_config(Path(self.project_dir))
+         except Exception:
+             compute_config = ComputeConfig()
+
+         # List engines with status
+         engines = [
+             {
+                 "name": "pushdown",
+                 "description": "Execute queries directly on source database",
+                 "enabled": True,
+                 "type": "native",
+             },
+             {
+                 "name": "duckdb",
+                 "description": "Lightweight in-process analytical database",
+                 "enabled": compute_config.duckdb.enabled,
+                 "type": "compute",
+             },
+             {
+                 "name": "spark_local",
+                 "description": "Apache Spark in local mode",
+                 "enabled": compute_config.spark_local.enabled,
+                 "type": "compute",
+             },
+             {
+                 "name": "spark_cluster",
+                 "description": "Apache Spark on cluster",
+                 "enabled": compute_config.spark_cluster.enabled,
+                 "type": "compute",
+             },
+         ]
+
+         fire_event(Note(msg="Engine List:"))
+         fire_event(Note(msg=""))
+
+         for engine in engines:
+             status = green("✓ Enabled") if engine["enabled"] else red("✗ Disabled")
+             fire_event(Note(msg=f" {green(engine['name'])} [{engine['type']}]"))
+             fire_event(Note(msg=f" Status: {status}"))
+             fire_event(Note(msg=f" Description: {engine['description']}"))
+             fire_event(Note(msg=""))
+
+         fire_event(Note(msg=f"Default engine: {green(compute_config.default_engine)}"))
+         fire_event(Note(msg=""))
+         fire_event(Note(msg="Use 'dvt compute test <engine>' to test a specific engine"))
+         fire_event(Note(msg=""))
+
+         return True
+
+     def interpret_results(self, results) -> bool:
+         return results
+
+
+ class ComputeTestTask(BaseTask):
+     """Task to test compute engines."""
+
+     def __init__(self, args: Flags, engine_name: Optional[str] = None) -> None:
+         super().__init__(args)
+         self.engine_name = engine_name
+         self.project_dir = args.PROJECT_DIR
+
+     def run(self) -> bool:
+         """Test one or all compute engines."""
+         fire_event(Note(msg=""))
+         fire_event(Note(msg="=" * 60))
+         if self.engine_name:
+             fire_event(Note(msg=f"Testing Compute Engine: {self.engine_name}"))
+         else:
+             fire_event(Note(msg="Testing All Compute Engines"))
+         fire_event(Note(msg="=" * 60))
+         fire_event(Note(msg=""))
+
+         # Load compute config
+         try:
+             compute_config = load_compute_config(Path(self.project_dir))
+         except Exception as e:
+             fire_event(Note(msg=yellow(f"Warning: Could not load compute config: {e}")))
+             compute_config = ComputeConfig()
+
+         # Determine which engines to test
+         if self.engine_name:
+             engines_to_test = [self.engine_name]
+         else:
+             engines_to_test = ["pushdown", "duckdb", "spark_local", "spark_cluster"]
+
+         # Test each engine
+         results = {}
+         for engine_name in engines_to_test:
+             fire_event(Note(msg=f"Testing {engine_name}..."))
+             result = self._test_engine(engine_name, compute_config)
+             results[engine_name] = result
+
+             if result["success"]:
+                 fire_event(Note(msg=green(f" ✓ {result['message']}")))
+             else:
+                 fire_event(Note(msg=red(f" ✗ {result['message']}")))
+
+             fire_event(Note(msg=""))
+
+         # Summary
+         success_count = sum(1 for r in results.values() if r["success"])
+         fail_count = len(results) - success_count
+
+         fire_event(Note(msg="=" * 60))
+         fire_event(Note(msg="Summary"))
+         fire_event(Note(msg="=" * 60))
+         fire_event(Note(msg=f"Total engines tested: {len(results)}"))
+         fire_event(Note(msg=green(f"Passed: {success_count}")))
+         if fail_count > 0:
+             fire_event(Note(msg=red(f"Failed: {fail_count}")))
+
+         fire_event(Note(msg=""))
+         for engine_name, result in results.items():
+             status = green("✓") if result["success"] else red("✗")
+             fire_event(Note(msg=f" {status} {engine_name}"))
+
+         fire_event(Note(msg=""))
+
+         return fail_count == 0
+
+     def _test_engine(self, engine_name: str, compute_config: ComputeConfig) -> Dict[str, Any]:
+         """Test a specific compute engine."""
+         try:
+             if engine_name == "pushdown":
+                 return {
+                     "success": True,
+                     "message": "Pushdown is always available (uses source database)",
+                 }
+
+             elif engine_name == "duckdb":
+                 if not compute_config.duckdb.enabled:
+                     return {
+                         "success": False,
+                         "message": "DuckDB is disabled in configuration",
+                     }
+
+                 # Try to import and initialize DuckDB
+                 try:
+                     import duckdb
+
+                     # Test connection
+                     conn = duckdb.connect(":memory:")
+                     conn.execute("SELECT 1").fetchone()
+                     conn.close()
+
+                     return {
+                         "success": True,
+                         "message": f"DuckDB available (version {duckdb.__version__})",
+                     }
+                 except ImportError:
+                     return {
+                         "success": False,
+                         "message": "DuckDB not installed (pip install duckdb)",
+                     }
+                 except Exception as e:
+                     return {
+                         "success": False,
+                         "message": f"DuckDB error: {str(e)}",
+                     }
+
+             elif engine_name == "spark_local":
+                 if not compute_config.spark_local.enabled:
+                     return {
+                         "success": False,
+                         "message": "Spark local mode is disabled in configuration",
+                     }
+
+                 # Try to import PySpark
+                 try:
+                     from pyspark import __version__ as spark_version
+                     from pyspark.sql import SparkSession
+
+                     # Try to create a local Spark session
+                     spark = (
+                         SparkSession.builder.master("local[1]")
+                         .appName("DVT-Test")
+                         .config("spark.ui.enabled", "false")
+                         .getOrCreate()
+                     )
+
+                     # Test basic operation
+                     df = spark.createDataFrame([(1,)], ["test"])
+                     result = df.count()
+                     spark.stop()
+
+                     return {
+                         "success": True,
+                         "message": f"Spark local mode available (version {spark_version})",
+                     }
+                 except ImportError:
+                     return {
+                         "success": False,
+                         "message": "PySpark not installed (pip install pyspark)",
+                     }
+                 except Exception as e:
+                     return {
+                         "success": False,
+                         "message": f"Spark local error: {str(e)}",
+                     }
+
+             elif engine_name == "spark_cluster":
+                 if not compute_config.spark_cluster.enabled:
+                     return {
+                         "success": False,
+                         "message": "Spark cluster mode is disabled in configuration",
+                     }
+
+                 if not compute_config.spark_cluster.master:
+                     return {
+                         "success": False,
+                         "message": "Spark cluster master not configured",
+                     }
+
+                 # For cluster mode, just check if PySpark is available
+                 # Actual cluster connection would require network access
+                 try:
+                     from pyspark import __version__ as spark_version
+
+                     return {
+                         "success": True,
+                         "message": f"PySpark available for cluster mode (version {spark_version}). Cluster connection not tested.",
+                     }
+                 except ImportError:
+                     return {
+                         "success": False,
+                         "message": "PySpark not installed (pip install pyspark)",
+                     }
+
+             else:
+                 return {
+                     "success": False,
+                     "message": f"Unknown engine: {engine_name}",
+                 }
+
+         except Exception as e:
+             return {
+                 "success": False,
+                 "message": f"Test error: {str(e)}",
+             }
+
+     def interpret_results(self, results) -> bool:
+         return results
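
The engine checks above are easy to reproduce outside of DVT. The following is a minimal standalone sketch that mirrors the DuckDB and PySpark availability probes from ComputeTestTask._test_engine; the check_duckdb and check_pyspark helper names are illustrative only (they are not part of the package), and the snippet assumes nothing beyond an optional duckdb / pyspark installation.

    # Standalone sketch of the availability probes used by ComputeTestTask._test_engine.
    # Runnable without dvt installed; duckdb and pyspark are optional here.
    def check_duckdb() -> str:
        try:
            import duckdb

            conn = duckdb.connect(":memory:")  # in-memory database, nothing touches disk
            conn.execute("SELECT 1").fetchone()
            conn.close()
            return f"DuckDB available (version {duckdb.__version__})"
        except ImportError:
            return "DuckDB not installed (pip install duckdb)"

    def check_pyspark() -> str:
        try:
            from pyspark import __version__ as spark_version

            return f"PySpark available (version {spark_version})"
        except ImportError:
            return "PySpark not installed (pip install pyspark)"

    if __name__ == "__main__":
        print(check_duckdb())
        print(check_pyspark())

Within DVT itself the same probes run through the task classes above, which appear to back the 'dvt compute test' command referenced in the output messages (the CLI wiring lives in dvt/cli/main.py, listed in the file set but not shown here).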