dvt-core 0.52.2 (cp310-cp310-macosx_10_9_x86_64.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dbt/__init__.py +7 -0
- dbt/_pydantic_shim.py +26 -0
- dbt/artifacts/__init__.py +0 -0
- dbt/artifacts/exceptions/__init__.py +1 -0
- dbt/artifacts/exceptions/schemas.py +31 -0
- dbt/artifacts/resources/__init__.py +116 -0
- dbt/artifacts/resources/base.py +67 -0
- dbt/artifacts/resources/types.py +93 -0
- dbt/artifacts/resources/v1/analysis.py +10 -0
- dbt/artifacts/resources/v1/catalog.py +23 -0
- dbt/artifacts/resources/v1/components.py +274 -0
- dbt/artifacts/resources/v1/config.py +277 -0
- dbt/artifacts/resources/v1/documentation.py +11 -0
- dbt/artifacts/resources/v1/exposure.py +51 -0
- dbt/artifacts/resources/v1/function.py +52 -0
- dbt/artifacts/resources/v1/generic_test.py +31 -0
- dbt/artifacts/resources/v1/group.py +21 -0
- dbt/artifacts/resources/v1/hook.py +11 -0
- dbt/artifacts/resources/v1/macro.py +29 -0
- dbt/artifacts/resources/v1/metric.py +172 -0
- dbt/artifacts/resources/v1/model.py +145 -0
- dbt/artifacts/resources/v1/owner.py +10 -0
- dbt/artifacts/resources/v1/saved_query.py +111 -0
- dbt/artifacts/resources/v1/seed.py +41 -0
- dbt/artifacts/resources/v1/semantic_layer_components.py +72 -0
- dbt/artifacts/resources/v1/semantic_model.py +314 -0
- dbt/artifacts/resources/v1/singular_test.py +14 -0
- dbt/artifacts/resources/v1/snapshot.py +91 -0
- dbt/artifacts/resources/v1/source_definition.py +84 -0
- dbt/artifacts/resources/v1/sql_operation.py +10 -0
- dbt/artifacts/resources/v1/unit_test_definition.py +77 -0
- dbt/artifacts/schemas/__init__.py +0 -0
- dbt/artifacts/schemas/base.py +191 -0
- dbt/artifacts/schemas/batch_results.py +24 -0
- dbt/artifacts/schemas/catalog/__init__.py +11 -0
- dbt/artifacts/schemas/catalog/v1/__init__.py +0 -0
- dbt/artifacts/schemas/catalog/v1/catalog.py +59 -0
- dbt/artifacts/schemas/freshness/__init__.py +1 -0
- dbt/artifacts/schemas/freshness/v3/__init__.py +0 -0
- dbt/artifacts/schemas/freshness/v3/freshness.py +158 -0
- dbt/artifacts/schemas/manifest/__init__.py +2 -0
- dbt/artifacts/schemas/manifest/v12/__init__.py +0 -0
- dbt/artifacts/schemas/manifest/v12/manifest.py +211 -0
- dbt/artifacts/schemas/results.py +147 -0
- dbt/artifacts/schemas/run/__init__.py +2 -0
- dbt/artifacts/schemas/run/v5/__init__.py +0 -0
- dbt/artifacts/schemas/run/v5/run.py +184 -0
- dbt/artifacts/schemas/upgrades/__init__.py +4 -0
- dbt/artifacts/schemas/upgrades/upgrade_manifest.py +174 -0
- dbt/artifacts/schemas/upgrades/upgrade_manifest_dbt_version.py +2 -0
- dbt/artifacts/utils/validation.py +153 -0
- dbt/cli/__init__.py +1 -0
- dbt/cli/context.py +17 -0
- dbt/cli/exceptions.py +57 -0
- dbt/cli/flags.py +560 -0
- dbt/cli/main.py +2039 -0
- dbt/cli/option_types.py +121 -0
- dbt/cli/options.py +80 -0
- dbt/cli/params.py +804 -0
- dbt/cli/requires.py +490 -0
- dbt/cli/resolvers.py +50 -0
- dbt/cli/types.py +40 -0
- dbt/clients/__init__.py +0 -0
- dbt/clients/checked_load.py +83 -0
- dbt/clients/git.py +164 -0
- dbt/clients/jinja.py +206 -0
- dbt/clients/jinja_static.py +245 -0
- dbt/clients/registry.py +192 -0
- dbt/clients/yaml_helper.py +68 -0
- dbt/compilation.py +876 -0
- dbt/compute/__init__.py +14 -0
- dbt/compute/engines/__init__.py +12 -0
- dbt/compute/engines/spark_engine.py +624 -0
- dbt/compute/federated_executor.py +837 -0
- dbt/compute/filter_pushdown.cpython-310-darwin.so +0 -0
- dbt/compute/filter_pushdown.py +273 -0
- dbt/compute/jar_provisioning.cpython-310-darwin.so +0 -0
- dbt/compute/jar_provisioning.py +255 -0
- dbt/compute/java_compat.cpython-310-darwin.so +0 -0
- dbt/compute/java_compat.py +689 -0
- dbt/compute/jdbc_utils.cpython-310-darwin.so +0 -0
- dbt/compute/jdbc_utils.py +678 -0
- dbt/compute/smart_selector.cpython-310-darwin.so +0 -0
- dbt/compute/smart_selector.py +311 -0
- dbt/compute/strategies/__init__.py +54 -0
- dbt/compute/strategies/base.py +165 -0
- dbt/compute/strategies/dataproc.py +207 -0
- dbt/compute/strategies/emr.py +203 -0
- dbt/compute/strategies/local.py +364 -0
- dbt/compute/strategies/standalone.py +262 -0
- dbt/config/__init__.py +4 -0
- dbt/config/catalogs.py +94 -0
- dbt/config/compute.cpython-310-darwin.so +0 -0
- dbt/config/compute.py +547 -0
- dbt/config/dvt_profile.cpython-310-darwin.so +0 -0
- dbt/config/dvt_profile.py +342 -0
- dbt/config/profile.py +422 -0
- dbt/config/project.py +873 -0
- dbt/config/project_utils.py +28 -0
- dbt/config/renderer.py +231 -0
- dbt/config/runtime.py +553 -0
- dbt/config/selectors.py +208 -0
- dbt/config/utils.py +77 -0
- dbt/constants.py +28 -0
- dbt/context/__init__.py +0 -0
- dbt/context/base.py +745 -0
- dbt/context/configured.py +135 -0
- dbt/context/context_config.py +382 -0
- dbt/context/docs.py +82 -0
- dbt/context/exceptions_jinja.py +178 -0
- dbt/context/macro_resolver.py +195 -0
- dbt/context/macros.py +171 -0
- dbt/context/manifest.py +72 -0
- dbt/context/providers.py +2249 -0
- dbt/context/query_header.py +13 -0
- dbt/context/secret.py +58 -0
- dbt/context/target.py +74 -0
- dbt/contracts/__init__.py +0 -0
- dbt/contracts/files.py +413 -0
- dbt/contracts/graph/__init__.py +0 -0
- dbt/contracts/graph/manifest.py +1904 -0
- dbt/contracts/graph/metrics.py +97 -0
- dbt/contracts/graph/model_config.py +70 -0
- dbt/contracts/graph/node_args.py +42 -0
- dbt/contracts/graph/nodes.py +1806 -0
- dbt/contracts/graph/semantic_manifest.py +232 -0
- dbt/contracts/graph/unparsed.py +811 -0
- dbt/contracts/project.py +417 -0
- dbt/contracts/results.py +53 -0
- dbt/contracts/selection.py +23 -0
- dbt/contracts/sql.py +85 -0
- dbt/contracts/state.py +68 -0
- dbt/contracts/util.py +46 -0
- dbt/deprecations.py +346 -0
- dbt/deps/__init__.py +0 -0
- dbt/deps/base.py +152 -0
- dbt/deps/git.py +195 -0
- dbt/deps/local.py +79 -0
- dbt/deps/registry.py +130 -0
- dbt/deps/resolver.py +149 -0
- dbt/deps/tarball.py +120 -0
- dbt/docs/source/_ext/dbt_click.py +119 -0
- dbt/docs/source/conf.py +32 -0
- dbt/env_vars.py +64 -0
- dbt/event_time/event_time.py +40 -0
- dbt/event_time/sample_window.py +60 -0
- dbt/events/__init__.py +15 -0
- dbt/events/base_types.py +36 -0
- dbt/events/core_types_pb2.py +2 -0
- dbt/events/logging.py +108 -0
- dbt/events/types.py +2516 -0
- dbt/exceptions.py +1486 -0
- dbt/flags.py +89 -0
- dbt/graph/__init__.py +11 -0
- dbt/graph/cli.py +247 -0
- dbt/graph/graph.py +172 -0
- dbt/graph/queue.py +214 -0
- dbt/graph/selector.py +374 -0
- dbt/graph/selector_methods.py +975 -0
- dbt/graph/selector_spec.py +222 -0
- dbt/graph/thread_pool.py +18 -0
- dbt/hooks.py +21 -0
- dbt/include/README.md +49 -0
- dbt/include/__init__.py +3 -0
- dbt/include/starter_project/.gitignore +4 -0
- dbt/include/starter_project/README.md +15 -0
- dbt/include/starter_project/__init__.py +3 -0
- dbt/include/starter_project/analyses/.gitkeep +0 -0
- dbt/include/starter_project/dbt_project.yml +36 -0
- dbt/include/starter_project/macros/.gitkeep +0 -0
- dbt/include/starter_project/models/example/my_first_dbt_model.sql +27 -0
- dbt/include/starter_project/models/example/my_second_dbt_model.sql +6 -0
- dbt/include/starter_project/models/example/schema.yml +21 -0
- dbt/include/starter_project/seeds/.gitkeep +0 -0
- dbt/include/starter_project/snapshots/.gitkeep +0 -0
- dbt/include/starter_project/tests/.gitkeep +0 -0
- dbt/internal_deprecations.py +26 -0
- dbt/jsonschemas/__init__.py +3 -0
- dbt/jsonschemas/jsonschemas.py +309 -0
- dbt/jsonschemas/project/0.0.110.json +4717 -0
- dbt/jsonschemas/project/0.0.85.json +2015 -0
- dbt/jsonschemas/resources/0.0.110.json +2636 -0
- dbt/jsonschemas/resources/0.0.85.json +2536 -0
- dbt/jsonschemas/resources/latest.json +6773 -0
- dbt/links.py +4 -0
- dbt/materializations/__init__.py +0 -0
- dbt/materializations/incremental/__init__.py +0 -0
- dbt/materializations/incremental/microbatch.py +236 -0
- dbt/mp_context.py +8 -0
- dbt/node_types.py +37 -0
- dbt/parser/__init__.py +23 -0
- dbt/parser/analysis.py +21 -0
- dbt/parser/base.py +548 -0
- dbt/parser/common.py +266 -0
- dbt/parser/docs.py +52 -0
- dbt/parser/fixtures.py +51 -0
- dbt/parser/functions.py +30 -0
- dbt/parser/generic_test.py +100 -0
- dbt/parser/generic_test_builders.py +333 -0
- dbt/parser/hooks.py +118 -0
- dbt/parser/macros.py +137 -0
- dbt/parser/manifest.py +2204 -0
- dbt/parser/models.py +573 -0
- dbt/parser/partial.py +1178 -0
- dbt/parser/read_files.py +445 -0
- dbt/parser/schema_generic_tests.py +422 -0
- dbt/parser/schema_renderer.py +111 -0
- dbt/parser/schema_yaml_readers.py +935 -0
- dbt/parser/schemas.py +1466 -0
- dbt/parser/search.py +149 -0
- dbt/parser/seeds.py +28 -0
- dbt/parser/singular_test.py +20 -0
- dbt/parser/snapshots.py +44 -0
- dbt/parser/sources.py +558 -0
- dbt/parser/sql.py +62 -0
- dbt/parser/unit_tests.py +621 -0
- dbt/plugins/__init__.py +20 -0
- dbt/plugins/contracts.py +9 -0
- dbt/plugins/exceptions.py +2 -0
- dbt/plugins/manager.py +163 -0
- dbt/plugins/manifest.py +21 -0
- dbt/profiler.py +20 -0
- dbt/py.typed +1 -0
- dbt/query_analyzer.cpython-310-darwin.so +0 -0
- dbt/query_analyzer.py +410 -0
- dbt/runners/__init__.py +2 -0
- dbt/runners/exposure_runner.py +7 -0
- dbt/runners/no_op_runner.py +45 -0
- dbt/runners/saved_query_runner.py +7 -0
- dbt/selected_resources.py +8 -0
- dbt/task/__init__.py +0 -0
- dbt/task/base.py +503 -0
- dbt/task/build.py +197 -0
- dbt/task/clean.py +56 -0
- dbt/task/clone.py +161 -0
- dbt/task/compile.py +150 -0
- dbt/task/compute.py +454 -0
- dbt/task/debug.py +505 -0
- dbt/task/deps.py +280 -0
- dbt/task/docs/__init__.py +3 -0
- dbt/task/docs/generate.py +660 -0
- dbt/task/docs/index.html +250 -0
- dbt/task/docs/serve.py +29 -0
- dbt/task/freshness.py +322 -0
- dbt/task/function.py +121 -0
- dbt/task/group_lookup.py +46 -0
- dbt/task/init.py +553 -0
- dbt/task/java.py +316 -0
- dbt/task/list.py +236 -0
- dbt/task/printer.py +175 -0
- dbt/task/retry.py +175 -0
- dbt/task/run.py +1306 -0
- dbt/task/run_operation.py +141 -0
- dbt/task/runnable.py +758 -0
- dbt/task/seed.py +103 -0
- dbt/task/show.py +149 -0
- dbt/task/snapshot.py +56 -0
- dbt/task/spark.py +414 -0
- dbt/task/sql.py +110 -0
- dbt/task/target_sync.py +759 -0
- dbt/task/test.py +464 -0
- dbt/tests/fixtures/__init__.py +1 -0
- dbt/tests/fixtures/project.py +620 -0
- dbt/tests/util.py +651 -0
- dbt/tracking.py +529 -0
- dbt/utils/__init__.py +3 -0
- dbt/utils/artifact_upload.py +151 -0
- dbt/utils/utils.py +408 -0
- dbt/version.py +268 -0
- dvt_cli/__init__.py +72 -0
- dvt_core-0.52.2.dist-info/METADATA +286 -0
- dvt_core-0.52.2.dist-info/RECORD +275 -0
- dvt_core-0.52.2.dist-info/WHEEL +5 -0
- dvt_core-0.52.2.dist-info/entry_points.txt +2 -0
- dvt_core-0.52.2.dist-info/top_level.txt +2 -0
dbt/task/java.py
ADDED
@@ -0,0 +1,316 @@
"""
Java Task Module

Handles DVT java management commands:
- check: Check Java and show compatibility with installed PySpark
- search: Find ALL Java installations on the system
- set: Interactive selection to set JAVA_HOME
- install: Guide for installing compatible Java

v0.51.3: New module for comprehensive Java management.
"""

import os
import platform
from typing import List, Optional

import click

from dbt.compute.java_compat import (
    JavaInstallation,
    find_all_java_installations,
    get_current_java,
    get_pyspark_info,
    get_pyspark_versions_for_java,
    check_java_pyspark_compatibility,
    set_java_home_persistent,
    PYSPARK_JAVA_COMPATIBILITY,
)


class JavaTask:
    """Task for managing Java installations."""

    def check(self) -> bool:
        """
        Check current Java installation and PySpark compatibility.

        Returns:
            bool: True if Java is compatible with installed PySpark
        """
        click.echo()
        click.echo(click.style("Java Status", fg="cyan", bold=True))
        click.echo("-" * 40)

        # Get current Java
        java = get_current_java()
        if java:
            click.echo(f" JAVA_HOME: {java.path}")
            click.echo(f" Version: Java {java.version}")
            click.echo(f" Vendor: {java.vendor}")
            click.echo(f" Details: {java.version_string}")
        else:
            click.echo(click.style(" ✗ Java not found!", fg="red"))
            click.echo()
            click.echo(" Run 'dvt java search' to find Java installations")
            click.echo(" Run 'dvt java install' for installation guide")
            click.echo()
            return False

        click.echo()
        click.echo(click.style("PySpark Status", fg="cyan", bold=True))
        click.echo("-" * 40)

        # Get PySpark info
        pyspark = get_pyspark_info()
        if pyspark:
            click.echo(f" Version: {pyspark.version}")
            click.echo(f" Required Java: {', '.join(str(v) for v in pyspark.java_supported)}")
            click.echo(f" Recommended: Java {pyspark.java_recommended}")
        else:
            click.echo(click.style(" ✗ PySpark not installed!", fg="red"))
            click.echo()
            click.echo(" Install with: pip install pyspark")
            click.echo()
            return False

        click.echo()
        click.echo(click.style("Compatibility", fg="cyan", bold=True))
        click.echo("-" * 40)

        # Check compatibility
        is_compat, msg = check_java_pyspark_compatibility(java.version, pyspark.major_minor)
        if is_compat:
            click.echo(click.style(f" ✓ {msg}", fg="green"))
        else:
            click.echo(click.style(f" ✗ {msg}", fg="red"))
            click.echo()
            click.echo(" Run 'dvt java set' to select a compatible Java version")

        click.echo()
        return is_compat

    def search(self) -> List[JavaInstallation]:
        """
        Find all Java installations on the system.

        Returns:
            List of JavaInstallation objects
        """
        click.echo()
        click.echo(click.style("Searching for Java installations...", fg="cyan"))
        click.echo()

        installations = find_all_java_installations()

        if not installations:
            click.echo(click.style("No Java installations found.", fg="yellow"))
            click.echo()
            click.echo("Run 'dvt java install' for installation guide")
            click.echo()
            return []

        click.echo(f"Found {len(installations)} Java installation(s):")
        click.echo()

        # Get PySpark info for compatibility display
        pyspark = get_pyspark_info()

        for i, inst in enumerate(installations, 1):
            # Mark current
            current_marker = click.style(" * CURRENT", fg="green") if inst.is_current else ""

            # Check compatibility with installed PySpark
            if pyspark:
                is_compat, _ = check_java_pyspark_compatibility(inst.version, pyspark.major_minor)
                compat_marker = click.style(" ✓", fg="green") if is_compat else click.style(" ✗", fg="red")
            else:
                compat_marker = ""

            click.echo(f" [{i}] Java {inst.version} ({inst.vendor}){current_marker}{compat_marker}")
            click.echo(f" {inst.path}")

            # Show which PySpark versions this Java supports
            compatible_pyspark = get_pyspark_versions_for_java(inst.version)
            if compatible_pyspark:
                click.echo(f" Compatible with: PySpark {', '.join(compatible_pyspark)}")
            click.echo()

        return installations

    def set_java_home(self, installation: Optional[JavaInstallation] = None) -> bool:
        """
        Interactively select and set JAVA_HOME.

        If no installation provided, presents interactive menu.

        Args:
            installation: Pre-selected JavaInstallation (optional)

        Returns:
            bool: True if successful
        """
        if installation:
            # Direct set
            success, msg = set_java_home_persistent(installation.path)
            if success:
                click.echo(click.style(f"✓ JAVA_HOME set to: {installation.path}", fg="green"))
                click.echo(f" {msg}")
            else:
                click.echo(click.style(f"✗ {msg}", fg="red"))
            return success

        # Interactive selection
        installations = find_all_java_installations()
        if not installations:
            click.echo(click.style("No Java installations found.", fg="yellow"))
            click.echo("Run 'dvt java install' for installation guide")
            return False

        # Get PySpark info for compatibility display
        pyspark = get_pyspark_info()

        click.echo()
        click.echo(click.style("Select Java installation:", fg="cyan", bold=True))
        click.echo()

        for i, inst in enumerate(installations, 1):
            # Mark current
            current_marker = click.style(" (current)", fg="blue") if inst.is_current else ""

            # Check compatibility with installed PySpark
            if pyspark:
                is_compat, _ = check_java_pyspark_compatibility(inst.version, pyspark.major_minor)
                if is_compat:
                    compat_marker = click.style(" ✓ compatible", fg="green")
                else:
                    compat_marker = click.style(" ✗ incompatible", fg="red")
            else:
                compat_marker = ""

            click.echo(f" [{i}] Java {inst.version} ({inst.vendor}){current_marker}{compat_marker}")
            click.echo(f" {inst.path}")
            click.echo()

        # Get user choice
        while True:
            try:
                choice = click.prompt("Your choice", type=int)
                if 1 <= choice <= len(installations):
                    break
                click.echo(click.style(f"Please enter a number between 1 and {len(installations)}", fg="yellow"))
            except click.Abort:
                click.echo("\nAborted.")
                return False

        selected = installations[choice - 1]

        # Warn if incompatible with PySpark
        if pyspark:
            is_compat, msg = check_java_pyspark_compatibility(selected.version, pyspark.major_minor)
            if not is_compat:
                click.echo()
                click.echo(click.style(f"⚠️ Warning: {msg}", fg="yellow"))
                if not click.confirm("Continue anyway?"):
                    return False

        # Set JAVA_HOME
        success, msg = set_java_home_persistent(selected.path)
        click.echo()
        if success:
            click.echo(click.style(f"✓ JAVA_HOME set to: {selected.path}", fg="green"))
            click.echo(f" {msg}")
        else:
            click.echo(click.style(f"✗ {msg}", fg="red"))

        return success

    def install_guide(self) -> None:
        """
        Show installation guide for compatible Java version.

        Displays platform-specific installation instructions based on
        the installed PySpark version.
        """
        click.echo()

        # Get PySpark info
        pyspark = get_pyspark_info()
        if pyspark:
            click.echo(click.style(f"Java Installation Guide for PySpark {pyspark.version}", fg="cyan", bold=True))
            click.echo("=" * 60)
            click.echo()
            click.echo(f"PySpark {pyspark.major_minor} requires Java: {', '.join(str(v) for v in pyspark.java_supported)}")
            click.echo(f"Recommended: Java {pyspark.java_recommended}")
            recommended = pyspark.java_recommended
        else:
            click.echo(click.style("Java Installation Guide", fg="cyan", bold=True))
            click.echo("=" * 60)
            click.echo()
            click.echo("PySpark is not installed. Assuming Java 17+ for latest PySpark.")
            recommended = 17

        click.echo()
        os_type = platform.system()

        if os_type == "Darwin":  # macOS
            click.echo(click.style("📦 macOS Installation Options:", fg="yellow", bold=True))
            click.echo()
            click.echo(" Option 1: Homebrew (recommended)")
            click.echo(click.style(f" brew install openjdk@{recommended}", fg="green"))
            click.echo()
            click.echo(" Option 2: SDKMAN (multiple versions)")
            click.echo(click.style(" curl -s \"https://get.sdkman.io\" | bash", fg="green"))
            click.echo(click.style(f" sdk install java {recommended}.0.2-tem", fg="green"))
            click.echo()
            click.echo(" Option 3: Download manually")
            click.echo(click.style(" https://adoptium.net/", fg="blue"))
            click.echo()
            click.echo(" After installation:")
            click.echo(f" export JAVA_HOME=$(/usr/libexec/java_home -v {recommended})")

        elif os_type == "Linux":
            click.echo(click.style("📦 Linux Installation Options:", fg="yellow", bold=True))
            click.echo()
            click.echo(" Ubuntu/Debian:")
            click.echo(click.style(" sudo apt-get update", fg="green"))
            click.echo(click.style(f" sudo apt-get install openjdk-{recommended}-jdk", fg="green"))
            click.echo()
            click.echo(" RHEL/CentOS/Fedora:")
            click.echo(click.style(f" sudo dnf install java-{recommended}-openjdk-devel", fg="green"))
            click.echo()
            click.echo(" Arch Linux:")
            click.echo(click.style(f" sudo pacman -S jdk{recommended}-openjdk", fg="green"))
            click.echo()
            click.echo(" SDKMAN (any distro):")
            click.echo(click.style(" curl -s \"https://get.sdkman.io\" | bash", fg="green"))
            click.echo(click.style(f" sdk install java {recommended}.0.2-tem", fg="green"))
            click.echo()
            click.echo(" After installation:")
            click.echo(f" export JAVA_HOME=/usr/lib/jvm/java-{recommended}-openjdk")

        elif os_type == "Windows":
            click.echo(click.style("📦 Windows Installation Options:", fg="yellow", bold=True))
            click.echo()
            click.echo(" Option 1: Winget (Windows 11/10)")
            click.echo(click.style(f" winget install EclipseAdoptium.Temurin.{recommended}.JDK", fg="green"))
            click.echo()
            click.echo(" Option 2: Chocolatey")
            click.echo(click.style(f" choco install temurin{recommended}", fg="green"))
            click.echo()
            click.echo(" Option 3: Scoop")
            click.echo(click.style(" scoop bucket add java", fg="green"))
            click.echo(click.style(f" scoop install temurin{recommended}-jdk", fg="green"))
            click.echo()
            click.echo(" Option 4: Download manually")
            click.echo(click.style(" https://adoptium.net/", fg="blue"))
            click.echo()
            click.echo(" After installation:")
            click.echo(" Set JAVA_HOME in System Environment Variables")

        click.echo()
        click.echo(click.style("After installing Java:", fg="cyan"))
        click.echo(" 1. Restart your terminal")
        click.echo(" 2. Run 'dvt java search' to verify")
        click.echo(" 3. Run 'dvt java set' to select the installation")
        click.echo()
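The 'dvt java' subcommands named in the module docstring map onto these four methods. A minimal wiring sketch, assuming a Click group named "java"; the actual registration lives in dbt/cli/main.py and may differ:

import click

from dbt.task.java import JavaTask


@click.group(name="java")
def java_group():
    """Manage Java installations for DVT."""


@java_group.command(name="check")
def java_check():
    # JavaTask.check() returns True only when Java and PySpark are compatible,
    # so a falsy result becomes a non-zero exit code (hypothetical convention).
    raise SystemExit(0 if JavaTask().check() else 1)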
dbt/task/list.py
ADDED
@@ -0,0 +1,236 @@
import json
from typing import Iterator, List

from dbt.cli.flags import Flags
from dbt.config.runtime import RuntimeConfig
from dbt.contracts.graph.manifest import Manifest
from dbt.contracts.graph.nodes import (
    Exposure,
    Metric,
    SavedQuery,
    SemanticModel,
    SourceDefinition,
    UnitTestDefinition,
)
from dbt.events.types import NoNodesSelected
from dbt.graph import ResourceTypeSelector
from dbt.node_types import NodeType
from dbt.task.base import resource_types_from_args
from dbt.task.runnable import GraphRunnableTask
from dbt.utils import JSONEncoder
from dbt_common.events.contextvars import task_contextvars
from dbt_common.events.functions import fire_event, warn_or_error
from dbt_common.events.types import PrintEvent
from dbt_common.exceptions import DbtInternalError, DbtRuntimeError


class ListTask(GraphRunnableTask):
    DEFAULT_RESOURCE_VALUES = frozenset(
        (
            NodeType.Model,
            NodeType.Snapshot,
            NodeType.Seed,
            NodeType.Test,
            NodeType.Source,
            NodeType.Exposure,
            NodeType.Metric,
            NodeType.SavedQuery,
            NodeType.SemanticModel,
            NodeType.Unit,
            NodeType.Function,
        )
    )
    ALL_RESOURCE_VALUES = DEFAULT_RESOURCE_VALUES | frozenset((NodeType.Analysis,))
    ALLOWED_KEYS = frozenset(
        (
            "alias",
            "name",
            "package_name",
            "depends_on",
            "tags",
            "config",
            "resource_type",
            "source_name",
            "original_file_path",
            "unique_id",
        )
    )

    def __init__(self, args: Flags, config: RuntimeConfig, manifest: Manifest) -> None:
        super().__init__(args, config, manifest)
        if self.args.models:
            if self.args.select:
                raise DbtRuntimeError('"models" and "select" are mutually exclusive arguments')
            if self.args.resource_types:
                raise DbtRuntimeError(
                    '"models" and "resource_type" are mutually exclusive arguments'
                )

    def _iterate_selected_nodes(self):
        selector = self.get_node_selector()
        spec = self.get_selection_spec()
        unique_ids = sorted(selector.get_selected(spec))
        if not unique_ids:
            warn_or_error(NoNodesSelected())
            return
        if self.manifest is None:
            raise DbtInternalError("manifest is None in _iterate_selected_nodes")
        for unique_id in unique_ids:
            if unique_id in self.manifest.nodes:
                yield self.manifest.nodes[unique_id]
            elif unique_id in self.manifest.sources:
                yield self.manifest.sources[unique_id]
            elif unique_id in self.manifest.exposures:
                yield self.manifest.exposures[unique_id]
            elif unique_id in self.manifest.metrics:
                yield self.manifest.metrics[unique_id]
            elif unique_id in self.manifest.semantic_models:
                yield self.manifest.semantic_models[unique_id]
            elif unique_id in self.manifest.unit_tests:
                yield self.manifest.unit_tests[unique_id]
            elif unique_id in self.manifest.saved_queries:
                yield self.manifest.saved_queries[unique_id]
            elif unique_id in self.manifest.functions:
                yield self.manifest.functions[unique_id]
            else:
                raise DbtRuntimeError(
                    f'Got an unexpected result from node selection: "{unique_id}". '
                    "Listing this node type is not yet supported!"
                )

    def generate_selectors(self):
        for node in self._iterate_selected_nodes():
            if node.resource_type == NodeType.Source:
                assert isinstance(node, SourceDefinition)
                # sources are searched for by pkg.source_name.table_name
                source_selector = ".".join([node.package_name, node.source_name, node.name])
                yield f"source:{source_selector}"
            elif node.resource_type == NodeType.Exposure:
                assert isinstance(node, Exposure)
                # exposures are searched for by pkg.exposure_name
                exposure_selector = ".".join([node.package_name, node.name])
                yield f"exposure:{exposure_selector}"
            elif node.resource_type == NodeType.Metric:
                assert isinstance(node, Metric)
                # metrics are searched for by pkg.metric_name
                metric_selector = ".".join([node.package_name, node.name])
                yield f"metric:{metric_selector}"
            elif node.resource_type == NodeType.SavedQuery:
                assert isinstance(node, SavedQuery)
                saved_query_selector = ".".join([node.package_name, node.name])
                yield f"saved_query:{saved_query_selector}"
            elif node.resource_type == NodeType.SemanticModel:
                assert isinstance(node, SemanticModel)
                semantic_model_selector = ".".join([node.package_name, node.name])
                yield f"semantic_model:{semantic_model_selector}"
            elif node.resource_type == NodeType.Unit:
                assert isinstance(node, UnitTestDefinition)
                unit_test_selector = ".".join([node.package_name, node.versioned_name])
                yield f"unit_test:{unit_test_selector}"
            else:
                # everything else is from `fqn`
                yield ".".join(node.fqn)

    def generate_names(self):
        for node in self._iterate_selected_nodes():
            yield node.search_name

    def _get_nested_value(self, data, key_path):
        """Get nested value using dot notation (e.g., 'config.materialized')"""
        keys = key_path.split(".")
        current = data
        for key in keys:
            if isinstance(current, dict) and key in current:
                current = current[key]
            else:
                return None
        return current

    def generate_json(self):
        for node in self._iterate_selected_nodes():
            node_dict = node.to_dict(omit_none=False)

            if self.args.output_keys:
                # Handle both nested and regular keys
                result = {}
                for key in self.args.output_keys:
                    if "." in key:
                        # Handle nested key (e.g., 'config.materialized')
                        value = self._get_nested_value(node_dict, key)
                        if value is not None:
                            result[key] = value
                    else:
                        # Handle regular key
                        if key in node_dict:
                            result[key] = node_dict[key]
            else:
                # Use default allowed keys
                result = {k: v for k, v in node_dict.items() if k in self.ALLOWED_KEYS}

            yield json.dumps(result, cls=JSONEncoder)

    def generate_paths(self) -> Iterator[str]:
        for node in self._iterate_selected_nodes():
            yield node.original_file_path

    def run(self):
        # We set up a context manager here with "task_contextvars" because
        # we need the project_root in compile_manifest.
        with task_contextvars(project_root=self.config.project_root):
            self.compile_manifest()
            output = self.args.output
            if output == "selector":
                generator = self.generate_selectors
            elif output == "name":
                generator = self.generate_names
            elif output == "json":
                generator = self.generate_json
            elif output == "path":
                generator = self.generate_paths
            else:
                raise DbtInternalError("Invalid output {}".format(output))

            return self.output_results(generator())

    def output_results(self, results):
        """Log or output a plain, newline-delimited, ready-to-pipe list of nodes found."""
        for result in results:
            self.node_results.append(result)
            # No formatting, still get to stdout when --quiet is used
            fire_event(PrintEvent(msg=result))
        return self.node_results

    @property
    def resource_types(self) -> List[NodeType]:
        if self.args.models:
            return [NodeType.Model]

        resource_types = resource_types_from_args(
            self.args, set(self.ALL_RESOURCE_VALUES), set(self.DEFAULT_RESOURCE_VALUES)
        )

        return list(resource_types)

    @property
    def selection_arg(self):
        # for backwards compatibility, list accepts both --models and --select,
        # with slightly different behavior: --models implies --resource-type model
        if self.args.models:
            return self.args.models
        else:
            return self.args.select

    def get_node_selector(self) -> ResourceTypeSelector:
        if self.manifest is None or self.graph is None:
            raise DbtInternalError("manifest and graph must be set to perform node selection")
        return ResourceTypeSelector(
            graph=self.graph,
            manifest=self.manifest,
            previous_state=self.previous_state,
            resource_types=self.resource_types,
            include_empty_nodes=True,
        )

    def interpret_results(self, results):
        # list command should always return 0 as exit code
        return True
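generate_json resolves dotted --output-keys such as 'config.materialized' through _get_nested_value. A standalone sketch of that lookup against a hypothetical node dict (the dict shape is illustrative, not the full manifest schema):

def get_nested_value(data, key_path):
    # Walk one dict level per dot; any miss short-circuits to None.
    current = data
    for key in key_path.split("."):
        if isinstance(current, dict) and key in current:
            current = current[key]
        else:
            return None
    return current

node_dict = {"name": "my_model", "config": {"materialized": "table"}, "tags": ["nightly"]}
assert get_nested_value(node_dict, "config.materialized") == "table"
assert get_nested_value(node_dict, "tags.0") is None  # only dicts are traversed, not lists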