@altimateai/altimate-code 0.5.1 → 0.5.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +35 -0
- package/bin/altimate +6 -0
- package/bin/altimate-code +6 -0
- package/dbt-tools/bin/altimate-dbt +2 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/altimate/__init__.py +0 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/altimate/fetch_schema.py +35 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/altimate/utils.py +353 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/altimate/validate_sql.py +114 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/__init__.py +178 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/__main__.py +96 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/_typing.py +17 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dataframe/__init__.py +3 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dataframe/sql/__init__.py +18 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dataframe/sql/_typing.py +18 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dataframe/sql/column.py +332 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dataframe/sql/dataframe.py +866 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dataframe/sql/functions.py +1267 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dataframe/sql/group.py +59 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dataframe/sql/normalize.py +78 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dataframe/sql/operations.py +53 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dataframe/sql/readwriter.py +108 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dataframe/sql/session.py +190 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dataframe/sql/transforms.py +9 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dataframe/sql/types.py +212 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dataframe/sql/util.py +32 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dataframe/sql/window.py +134 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/__init__.py +118 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/athena.py +166 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/bigquery.py +1331 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/clickhouse.py +1393 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/databricks.py +131 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/dialect.py +1915 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/doris.py +561 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/drill.py +157 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/druid.py +20 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/duckdb.py +1159 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/dune.py +16 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/hive.py +787 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/materialize.py +94 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/mysql.py +1324 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/oracle.py +378 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/postgres.py +778 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/presto.py +788 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/prql.py +203 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/redshift.py +448 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/risingwave.py +78 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/snowflake.py +1464 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/spark.py +202 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/spark2.py +349 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/sqlite.py +320 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/starrocks.py +343 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/tableau.py +61 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/teradata.py +356 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/trino.py +115 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/dialects/tsql.py +1403 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/diff.py +456 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/errors.py +93 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/executor/__init__.py +95 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/executor/context.py +101 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/executor/env.py +246 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/executor/python.py +460 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/executor/table.py +155 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/expressions.py +8870 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/generator.py +4993 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/helper.py +582 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/jsonpath.py +227 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/lineage.py +423 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/__init__.py +11 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/annotate_types.py +589 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/canonicalize.py +222 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/eliminate_ctes.py +43 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/eliminate_joins.py +181 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/eliminate_subqueries.py +189 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/isolate_table_selects.py +50 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/merge_subqueries.py +415 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/normalize.py +200 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/normalize_identifiers.py +64 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/optimize_joins.py +91 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/optimizer.py +94 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/pushdown_predicates.py +222 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/pushdown_projections.py +172 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/qualify.py +104 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/qualify_columns.py +1024 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/qualify_tables.py +155 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/scope.py +904 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/simplify.py +1587 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/optimizer/unnest_subqueries.py +302 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/parser.py +8501 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/planner.py +463 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/schema.py +588 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/serde.py +68 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/time.py +687 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/tokens.py +1520 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/transforms.py +1020 -0
- package/dbt-tools/dist/altimate_python_packages/altimate_packages/sqlglot/trie.py +81 -0
- package/dbt-tools/dist/altimate_python_packages/dbt_core_integration.py +825 -0
- package/dbt-tools/dist/altimate_python_packages/dbt_utils.py +157 -0
- package/dbt-tools/dist/index.js +23859 -0
- package/package.json +13 -13
- package/postinstall.mjs +42 -0
- package/skills/altimate-setup/SKILL.md +31 -0
|
@@ -0,0 +1,825 @@
|
|
|
1
|
+
# Fail fast with an actionable message when dbt-core is not importable;
# everything below depends on it.
try:
    from dbt.version import __version__ as dbt_version
except Exception:
    # NOTE(review): broad catch — presumably also guards against partially
    # installed dbt distributions, not just ImportError; confirm intent.
    raise Exception("dbt not found. Please install dbt to use this extension.")
|
|
5
|
+
|
|
6
|
+
import os
|
|
7
|
+
import threading
|
|
8
|
+
import uuid
|
|
9
|
+
from collections import UserDict
|
|
10
|
+
from copy import copy
|
|
11
|
+
from functools import lru_cache, partial
|
|
12
|
+
from hashlib import md5
|
|
13
|
+
from typing import (
|
|
14
|
+
TYPE_CHECKING,
|
|
15
|
+
Any,
|
|
16
|
+
Callable,
|
|
17
|
+
Dict,
|
|
18
|
+
List,
|
|
19
|
+
Optional,
|
|
20
|
+
Tuple,
|
|
21
|
+
TypeVar,
|
|
22
|
+
Union,
|
|
23
|
+
)
|
|
24
|
+
|
|
25
|
+
import agate
|
|
26
|
+
import json
|
|
27
|
+
from dbt.adapters.factory import get_adapter, register_adapter
|
|
28
|
+
from dbt.config.runtime import RuntimeConfig
|
|
29
|
+
from dbt.flags import set_from_args
|
|
30
|
+
from dbt.parser.manifest import ManifestLoader, process_node
|
|
31
|
+
from dbt.parser.sql import SqlBlockParser, SqlMacroParser
|
|
32
|
+
from dbt.task.sql import SqlCompileRunner, SqlExecuteRunner
|
|
33
|
+
from dbt.tracking import disable_tracking
|
|
34
|
+
|
|
35
|
+
# Split "major.minor.patch" into comparable parts; pre-release segments
# (e.g. "0rc1") are not numeric and are kept as strings.
DBT_MAJOR_VER, DBT_MINOR_VER, DBT_PATCH_VER = (
    int(v) if v.isnumeric() else v for v in dbt_version.split(".")
)
|
|
38
|
+
|
|
39
|
+
if DBT_MAJOR_VER >=1 and DBT_MINOR_VER >= 8:
|
|
40
|
+
from dbt.contracts.graph.manifest import Manifest # type: ignore
|
|
41
|
+
from dbt.contracts.graph.nodes import ManifestNode, CompiledNode # type: ignore
|
|
42
|
+
from dbt.artifacts.resources.v1.components import ColumnInfo # type: ignore
|
|
43
|
+
from dbt.artifacts.resources.types import NodeType # type: ignore
|
|
44
|
+
from dbt_common.events.functions import fire_event # type: ignore
|
|
45
|
+
from dbt.artifacts.schemas.manifest import WritableManifest # type: ignore
|
|
46
|
+
elif DBT_MAJOR_VER >= 1 and DBT_MINOR_VER > 3:
|
|
47
|
+
from dbt.contracts.graph.nodes import ColumnInfo, ManifestNode, CompiledNode # type: ignore
|
|
48
|
+
from dbt.node_types import NodeType # type: ignore
|
|
49
|
+
from dbt.contracts.graph.manifest import WritableManifest # type: ignore
|
|
50
|
+
from dbt.events.functions import fire_event # type: ignore
|
|
51
|
+
else:
|
|
52
|
+
from dbt.contracts.graph.compiled import ManifestNode, CompiledNode # type: ignore
|
|
53
|
+
from dbt.contracts.graph.parsed import ColumnInfo # type: ignore
|
|
54
|
+
from dbt.node_types import NodeType # type: ignore
|
|
55
|
+
from dbt.events.functions import fire_event # type: ignore
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
if TYPE_CHECKING:
|
|
59
|
+
# These imports are only used for type checking
|
|
60
|
+
from dbt.adapters.base import BaseRelation # type: ignore
|
|
61
|
+
if DBT_MAJOR_VER >= 1 and DBT_MINOR_VER >= 8:
|
|
62
|
+
from dbt.adapters.contracts.connection import AdapterResponse
|
|
63
|
+
else:
|
|
64
|
+
from dbt.contracts.connection import AdapterResponse
|
|
65
|
+
|
|
66
|
+
# Aliases for primitive JSON-ish values.
Primitive = Union[bool, str, float, None]
PrimitiveDict = Dict[str, Primitive]

# Process-wide memoization store used by memoize_get_rendered.
CACHE = {}
# Bump to invalidate all previously cached entries.
CACHE_VERSION = 1
# Capacity hint for SQL-level LRU caches.
SQL_CACHE_SIZE = 1024

# File name of the manifest artifact written to the target path.
MANIFEST_ARTIFACT = "manifest.json"

# dbt renamed these node attributes in 1.3; pick the right name at import time.
RAW_CODE = "raw_code" if DBT_MAJOR_VER >= 1 and DBT_MINOR_VER >= 3 else "raw_sql"
COMPILED_CODE = (
    "compiled_code" if DBT_MAJOR_VER >= 1 and DBT_MINOR_VER >= 3 else "compiled_sql"
)

# Presence of any of these sequences means a string must be jinja-rendered.
JINJA_CONTROL_SEQS = ["{{", "}}", "{%", "%}", "{#", "#}"]

T = TypeVar("T")

# Names of dbt flags / environment variables referenced by this integration.
REQUIRE_RESOURCE_NAMES_WITHOUT_SPACES = "REQUIRE_RESOURCE_NAMES_WITHOUT_SPACES"
DBT_DEBUG = "DBT_DEBUG"
DBT_DEFER = "DBT_DEFER"
DBT_STATE = "DBT_STATE"
DBT_FAVOR_STATE = "DBT_FAVOR_STATE"
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
def has_jinja(query: str) -> bool:
    """Return True when *query* contains any jinja control sequence and
    therefore needs rendering before further processing."""
    for marker in JINJA_CONTROL_SEQS:
        if marker in query:
            return True
    return False
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
def memoize_get_rendered(function):
    """Custom memoization decorator for dbt-core's jinja ``get_rendered``.

    Results are keyed on the md5 of the stripped template string plus the
    module-level CACHE_VERSION (bumping it busts everything), and additionally
    on the node's unique_id when macros are captured. Ephemeral nodes are
    never cached because their rendering depends on surrounding compilation
    state.
    """

    def wrapper(
        string: str,
        ctx: Dict[str, Any],
        node: "ManifestNode" = None,
        capture_macros: bool = False,
        native: bool = False,
    ):
        key = md5(string.strip().encode("utf-8")).hexdigest()
        key += "__" + str(CACHE_VERSION)
        # Idiom fix: truth-test the flag instead of comparing `== True`.
        if capture_macros and node is not None:
            if node.is_ephemeral:
                # Ephemeral nodes are compiled inline; caching would be unsafe.
                return function(string, ctx, node, capture_macros, native)
            key += "__" + node.unique_id
        rv = CACHE.get(key)
        if rv is None:
            rv = function(string, ctx, node, capture_macros, native)
            CACHE[key] = rv
        return rv

    return wrapper
|
|
120
|
+
|
|
121
|
+
|
|
122
|
+
def default_profiles_dir(project_dir):
    """Resolve the directory dbt should search for profiles.yml.

    With DBT_PROFILES_DIR set, an absolute value is used as-is while a
    relative value is resolved against *project_dir* — mirroring dbt core's
    own path handling (see
    https://github.com/AltimateAI/vscode-dbt-power-user/issues/1518).
    Without it, prefer a profiles.yml living in the project directory and
    otherwise fall back to ~/.dbt/.
    """
    env_value = os.environ.get("DBT_PROFILES_DIR")
    if env_value is not None:
        expanded = os.path.expanduser(env_value)
        if not os.path.isabs(expanded):
            expanded = os.path.join(project_dir, expanded)
        return os.path.normpath(expanded)
    if os.path.exists(os.path.normpath(os.path.join(project_dir, "profiles.yml"))):
        return project_dir
    return os.path.join(os.path.expanduser("~"), ".dbt")
|
|
146
|
+
|
|
147
|
+
|
|
148
|
+
def target_path(project_dir):
    """Resolve DBT_TARGET_PATH to a normalized path (relative values are
    anchored at *project_dir*); return None when the variable is unset."""
    configured = os.environ.get("DBT_TARGET_PATH")
    if configured is None:
        return None
    configured = os.path.expanduser(configured)
    if os.path.isabs(configured):
        return os.path.normpath(configured)
    return os.path.normpath(os.path.join(project_dir, configured))
|
|
155
|
+
|
|
156
|
+
|
|
157
|
+
def find_package_paths(project_directories):
    """Best-effort resolution of each project's packages install path.

    Returns a list aligned with *project_directories*; an entry is None when
    that project's config cannot be loaded (such failures are surfaced later,
    when the project itself is loaded).
    """

    def get_package_path(project_dir):
        try:
            project = DbtProject(
                project_dir=project_dir,
                profiles_dir=default_profiles_dir(project_dir),
                target_path=target_path(project_dir),
            )
            project.init_config()
            packages_path = project.config.packages_install_path
            if os.path.isabs(packages_path):
                return os.path.normpath(packages_path)
            return os.path.normpath(os.path.join(project_dir, packages_path))
        except Exception:
            # Deliberately best-effort: config errors are reported when the
            # project is actually loaded, not here. Explicitly yield None.
            return None

    return list(map(get_package_path, project_directories))
|
|
175
|
+
|
|
176
|
+
|
|
177
|
+
# Performance hacks
# jinja.get_rendered = memoize_get_rendered(jinja.get_rendered)
disable_tracking()
# Silence dbt's event stream for this embedded usage; rebinding the imported
# name to a no-op only affects call sites within this module.
fire_event = lambda e: None
|
|
181
|
+
|
|
182
|
+
|
|
183
|
+
class ConfigInterface:
    """Mimics dbt-core's args-based interface for dbt-core class
    instantiation (e.g. ``RuntimeConfig.from_args``).

    Unset values generally fall back to environment variables
    (DBT_TARGET, DBT_PROFILE, DBT_VARS).
    """

    def __init__(
        self,
        threads: Optional[int] = 1,
        target: Optional[str] = None,
        profiles_dir: Optional[str] = None,
        project_dir: Optional[str] = None,
        profile: Optional[str] = None,
        target_path: Optional[str] = None,
        defer: Optional[bool] = False,
        state: Optional[str] = None,
        favor_state: Optional[bool] = False,
        # dict in 1.5.x onwards, json string before. None means "use the
        # historical default" — avoids a shared mutable default argument.
        vars: Optional[Union[Dict[str, Any], str]] = None,
    ):
        if vars is None:
            # Replicates the old defaults ({} on >=1.5, "{}" before) exactly.
            vars = {} if DBT_MAJOR_VER >= 1 and DBT_MINOR_VER >= 5 else "{}"
        self.threads = threads
        self.target = target if target else os.environ.get("DBT_TARGET")
        self.profiles_dir = profiles_dir
        self.project_dir = project_dir
        self.dependencies = []
        # dbt treats a single thread as "single threaded" execution.
        self.single_threaded = threads == 1
        self.quiet = True
        self.profile = profile if profile else os.environ.get("DBT_PROFILE")
        self.target_path = target_path
        self.defer = defer
        self.state = state
        self.favor_state = favor_state
        # dict in 1.5.x onwards, json string before.
        if DBT_MAJOR_VER >= 1 and DBT_MINOR_VER >= 5:
            self.vars = vars if vars else json.loads(os.environ.get("DBT_VARS", "{}"))
        else:
            self.vars = vars if vars else os.environ.get("DBT_VARS", "{}")

    def __str__(self):
        return f"ConfigInterface(threads={self.threads}, target={self.target}, profiles_dir={self.profiles_dir}, project_dir={self.project_dir}, profile={self.profile}, target_path={self.target_path})"
|
|
221
|
+
|
|
222
|
+
|
|
223
|
+
class ManifestProxy(UserDict):
|
|
224
|
+
"""Proxy for manifest dictionary (`flat_graph`), if we need mutation then we should
|
|
225
|
+
create a copy of the dict or interface with the dbt-core manifest object instead"""
|
|
226
|
+
|
|
227
|
+
def _readonly(self, *args, **kwargs):
|
|
228
|
+
raise RuntimeError("Cannot modify ManifestProxy")
|
|
229
|
+
|
|
230
|
+
__setitem__ = _readonly
|
|
231
|
+
__delitem__ = _readonly
|
|
232
|
+
pop = _readonly
|
|
233
|
+
popitem = _readonly
|
|
234
|
+
clear = _readonly
|
|
235
|
+
update = _readonly
|
|
236
|
+
setdefault = _readonly
|
|
237
|
+
|
|
238
|
+
|
|
239
|
+
class DbtAdapterExecutionResult:
    """Thin wrapper around an adapter execution result; keeps callers one
    layer removed from dbt interfaces that may change between releases."""

    def __init__(
        self,
        adapter_response: "AdapterResponse",
        table: agate.Table,
        raw_sql: str,
        compiled_sql: str,
    ) -> None:
        # Retain both the SQL as authored and as compiled, alongside the
        # adapter's response object and the tabular result.
        self.raw_sql = raw_sql
        self.compiled_sql = compiled_sql
        self.adapter_response = adapter_response
        self.table = table
|
|
253
|
+
|
|
254
|
+
|
|
255
|
+
class DbtAdapterCompilationResult:
    """Thin wrapper around a compilation result; keeps callers one layer
    removed from dbt interfaces that may change between releases."""

    def __init__(self, raw_sql: str, compiled_sql: str, node: "ManifestNode") -> None:
        self.node = node
        self.compiled_sql = compiled_sql
        self.raw_sql = raw_sql
|
|
262
|
+
|
|
263
|
+
|
|
264
|
+
class DbtProject:
|
|
265
|
+
"""Container for a dbt project. The dbt attribute is the primary interface for
|
|
266
|
+
dbt-core. The adapter attribute is the primary interface for the dbt adapter"""
|
|
267
|
+
|
|
268
|
+
def __init__(
|
|
269
|
+
self,
|
|
270
|
+
target_name: Optional[str] = None,
|
|
271
|
+
profiles_dir: Optional[str] = None,
|
|
272
|
+
project_dir: Optional[str] = None,
|
|
273
|
+
threads: Optional[int] = 1,
|
|
274
|
+
profile: Optional[str] = None,
|
|
275
|
+
target_path: Optional[str] = None,
|
|
276
|
+
defer_to_prod: bool = False,
|
|
277
|
+
manifest_path: Optional[str] = None,
|
|
278
|
+
favor_state: bool = False,
|
|
279
|
+
vars: Optional[Dict[str, Any]] = {},
|
|
280
|
+
):
|
|
281
|
+
self.args = ConfigInterface(
|
|
282
|
+
threads=threads,
|
|
283
|
+
target=target_name,
|
|
284
|
+
profiles_dir=profiles_dir,
|
|
285
|
+
project_dir=project_dir,
|
|
286
|
+
profile=profile,
|
|
287
|
+
target_path=target_path,
|
|
288
|
+
defer=defer_to_prod,
|
|
289
|
+
state=manifest_path,
|
|
290
|
+
favor_state=favor_state,
|
|
291
|
+
vars=vars,
|
|
292
|
+
)
|
|
293
|
+
|
|
294
|
+
# Utilities
|
|
295
|
+
self._sql_parser: Optional[SqlBlockParser] = None
|
|
296
|
+
self._macro_parser: Optional[SqlMacroParser] = None
|
|
297
|
+
self._sql_runner: Optional[SqlExecuteRunner] = None
|
|
298
|
+
self._sql_compiler: Optional[SqlCompileRunner] = None
|
|
299
|
+
|
|
300
|
+
# Tracks internal state version
|
|
301
|
+
self._version: int = 1
|
|
302
|
+
self.mutex = threading.Lock()
|
|
303
|
+
self.defer_to_prod = defer_to_prod
|
|
304
|
+
self.defer_to_prod_manifest_path = manifest_path
|
|
305
|
+
self.favor_state = favor_state
|
|
306
|
+
|
|
307
|
+
    def init_config(self):
        """Build and register the dbt RuntimeConfig from self.args.

        Handles the flag/config API differences between dbt >= 1.8 and
        earlier releases.
        """
        if DBT_MAJOR_VER >= 1 and DBT_MINOR_VER >= 8:
            from dbt_common.context import set_invocation_context
            from dbt.flags import get_flags
            set_invocation_context(os.environ)
            set_from_args(self.args, None)
            # Copy over global_flags
            for key, value in get_flags().__dict__.items():
                if key not in self.args.__dict__:
                    self.args.__dict__[key] = value
        else:
            set_from_args(self.args, self.args)
        self.config = RuntimeConfig.from_args(self.args)
        # Older configs expose source_paths; mirror it into model_paths so
        # downstream code can rely on one attribute name.
        if hasattr(self.config, "source_paths"):
            self.config.model_paths = self.config.source_paths
        if DBT_MAJOR_VER >= 1 and DBT_MINOR_VER >= 8:
            # 1.8+ requires an explicit multiprocessing context.
            from dbt.mp_context import get_mp_context
            register_adapter(self.config, get_mp_context())
        else:
            register_adapter(self.config)
|
|
327
|
+
|
|
328
|
+
    def init_project(self):
        """Initialize config and adapter, open a connection, and parse macros.

        On any failure, partially-initialized state is cleared so a later
        retry starts clean; the original error message is re-raised.
        """
        try:
            self.init_config()
            self.adapter = get_adapter(self.config)
            self.adapter.connections.set_connection_name()
            if DBT_MAJOR_VER >= 1 and DBT_MINOR_VER >= 8:
                # 1.8+ adapters need a macro context generator wired in.
                from dbt.context.providers import generate_runtime_macro_context
                self.adapter.set_macro_context_generator(generate_runtime_macro_context)
            self.create_parser()
        except Exception as e:
            # reset project
            self.config = None
            self.dbt = None
            raise Exception(str(e))
|
|
342
|
+
|
|
343
|
+
    def parse_project(self) -> None:
        """Re-parse the dbt project manifest and rebuild the flat graph.

        On failure the stale manifest is discarded and the original error
        message is re-raised.
        """
        try:
            self.create_parser()
            self.dbt.build_flat_graph()
        except Exception as e:
            # reset manifest
            self.dbt = None
            raise Exception(str(e))

        # Drop lazily-built parsers/runners so the next access recreates them
        # against the fresh manifest.
        self._sql_parser = None
        self._macro_parser = None
        self._sql_compiler = None
        self._sql_runner = None
|
|
356
|
+
|
|
357
|
+
    def create_parser(self) -> None:
        """Load project dependencies, run the ManifestLoader, and store the
        resulting manifest; also pushes parsed macros into the adapter so
        macro resolution works during SQL compilation."""
        all_projects = self.config.load_dependencies()
        # filter out project with value LoomRunnableConfig class type as those projects are dependency projects
        # https://github.com/AltimateAI/vscode-dbt-power-user/issues/1224
        all_projects = {k: v for k, v in all_projects.items() if not v.__class__.__name__ == "LoomRunnableConfig"}

        project_parser = ManifestLoader(
            self.config,
            all_projects,
            self.adapter.connections.set_query_header,
        )
        self.dbt = project_parser.load()
        project_parser.save_macros_to_adapter(self.adapter)
|
|
370
|
+
|
|
371
|
+
def set_defer_config(
|
|
372
|
+
self, defer_to_prod: bool, manifest_path: str, favor_state: bool
|
|
373
|
+
) -> None:
|
|
374
|
+
if DBT_MAJOR_VER >= 1 and DBT_MINOR_VER >= 8:
|
|
375
|
+
self.args.defer = defer_to_prod
|
|
376
|
+
self.args.state = manifest_path
|
|
377
|
+
self.args.favor_state = favor_state
|
|
378
|
+
self.defer_to_prod = defer_to_prod
|
|
379
|
+
self.defer_to_prod_manifest_path = manifest_path
|
|
380
|
+
self.favor_state = favor_state
|
|
381
|
+
|
|
382
|
+
@classmethod
|
|
383
|
+
def from_args(cls, args: ConfigInterface) -> "DbtProject":
|
|
384
|
+
"""Instatiate the DbtProject directly from a ConfigInterface instance"""
|
|
385
|
+
return cls(
|
|
386
|
+
target=args.target,
|
|
387
|
+
profiles_dir=args.profiles_dir,
|
|
388
|
+
project_dir=args.project_dir,
|
|
389
|
+
threads=args.threads,
|
|
390
|
+
profile=args.profile,
|
|
391
|
+
target_path=args.target_path,
|
|
392
|
+
vars=args.vars,
|
|
393
|
+
)
|
|
394
|
+
|
|
395
|
+
@property
|
|
396
|
+
def sql_parser(self) -> SqlBlockParser:
|
|
397
|
+
"""A dbt-core SQL parser capable of parsing and adding nodes to the manifest via `parse_remote` which will
|
|
398
|
+
also return the added node to the caller. Note that post-parsing this still typically requires calls to
|
|
399
|
+
`_process_nodes_for_ref` and `_process_sources_for_ref` from `dbt.parser.manifest`
|
|
400
|
+
"""
|
|
401
|
+
if self._sql_parser is None:
|
|
402
|
+
self._sql_parser = SqlBlockParser(self.config, self.dbt, self.config)
|
|
403
|
+
return self._sql_parser
|
|
404
|
+
|
|
405
|
+
@property
|
|
406
|
+
def macro_parser(self) -> SqlMacroParser:
|
|
407
|
+
"""A dbt-core macro parser"""
|
|
408
|
+
if self._macro_parser is None:
|
|
409
|
+
self._macro_parser = SqlMacroParser(self.config, self.dbt)
|
|
410
|
+
return self._macro_parser
|
|
411
|
+
|
|
412
|
+
@property
|
|
413
|
+
def sql_runner(self) -> SqlExecuteRunner:
|
|
414
|
+
"""A runner which is used internally by the `execute_sql` function of `dbt.lib`.
|
|
415
|
+
The runners `node` attribute can be updated before calling `compile` or `compile_and_execute`.
|
|
416
|
+
"""
|
|
417
|
+
if self._sql_runner is None:
|
|
418
|
+
self._sql_runner = SqlExecuteRunner(
|
|
419
|
+
self.config, self.adapter, node=None, node_index=1, num_nodes=1
|
|
420
|
+
)
|
|
421
|
+
return self._sql_runner
|
|
422
|
+
|
|
423
|
+
@property
|
|
424
|
+
def sql_compiler(self) -> SqlCompileRunner:
|
|
425
|
+
"""A runner which is used internally by the `compile_sql` function of `dbt.lib`.
|
|
426
|
+
The runners `node` attribute can be updated before calling `compile` or `compile_and_execute`.
|
|
427
|
+
"""
|
|
428
|
+
if self._sql_compiler is None:
|
|
429
|
+
self._sql_compiler = SqlCompileRunner(
|
|
430
|
+
self.config, self.adapter, node=None, node_index=1, num_nodes=1
|
|
431
|
+
)
|
|
432
|
+
return self._sql_compiler
|
|
433
|
+
|
|
434
|
+
@property
|
|
435
|
+
def project_name(self) -> str:
|
|
436
|
+
"""dbt project name"""
|
|
437
|
+
return self.config.project_name
|
|
438
|
+
|
|
439
|
+
@property
|
|
440
|
+
def project_root(self) -> str:
|
|
441
|
+
"""dbt project root"""
|
|
442
|
+
return self.config.project_root
|
|
443
|
+
|
|
444
|
+
@property
|
|
445
|
+
def manifest(self) -> ManifestProxy:
|
|
446
|
+
"""dbt manifest dict"""
|
|
447
|
+
return ManifestProxy(self.dbt.flat_graph)
|
|
448
|
+
|
|
449
|
+
    def safe_parse_project(self) -> None:
        """Re-parse the project, writing the manifest artifact on success and
        merging a production manifest when defer-to-prod is enabled.

        On failure the previous config object is restored so the instance can
        retry after the user fixes their setup.
        """
        self.clear_caches()
        # reinit the project because config may change
        # this operation is cheap anyway
        self.init_project()
        # doing this so that we can allow inits to fail when config is
        # bad and restart after the user sets it up correctly
        if hasattr(self, "config"):
            _config_pointer = copy(self.config)
        else:
            _config_pointer = None
        try:
            self.parse_project()
            self.write_manifest_artifact()

            if self.defer_to_prod:
                if DBT_MAJOR_VER >= 1 and DBT_MINOR_VER >= 8:
                    # 1.8+: read/validate the prod manifest, then merge it in.
                    writable_manifest = WritableManifest.read_and_check_versions(self.defer_to_prod_manifest_path)
                    manifest = Manifest.from_writable_manifest(writable_manifest)
                    self.dbt.merge_from_artifact(
                        other=manifest,
                    )
                else:
                    # Older dbt: merge_from_artifact takes the adapter, an
                    # (empty) selection set, and the favor_state flag.
                    with open(self.defer_to_prod_manifest_path) as f:
                        manifest = WritableManifest.from_dict(json.load(f))
                        selected = set()
                        self.dbt.merge_from_artifact(
                            self.adapter,
                            other=manifest,
                            selected=selected,
                            favor_state=self.favor_state,
                        )
        except Exception as e:
            self.config = _config_pointer
            raise Exception(str(e))
|
|
484
|
+
|
|
485
|
+
def write_manifest_artifact(self) -> None:
|
|
486
|
+
"""Write a manifest.json to disk"""
|
|
487
|
+
artifact_path = os.path.join(
|
|
488
|
+
self.config.project_root, self.config.target_path, MANIFEST_ARTIFACT
|
|
489
|
+
)
|
|
490
|
+
self.dbt.write(artifact_path)
|
|
491
|
+
|
|
492
|
+
def clear_caches(self) -> None:
|
|
493
|
+
"""Clear least recently used caches and reinstantiable container objects"""
|
|
494
|
+
self.get_ref_node.cache_clear()
|
|
495
|
+
self.get_source_node.cache_clear()
|
|
496
|
+
self.get_macro_function.cache_clear()
|
|
497
|
+
self.get_columns.cache_clear()
|
|
498
|
+
|
|
499
|
+
    @lru_cache(maxsize=10)
    def get_ref_node(self, target_model_name: str) -> "ManifestNode":
        """Resolve a model name to its manifest node via dbt's ref resolution.

        The ``resolve_ref`` signature changed across dbt releases, hence the
        version branching. NOTE(review): ``lru_cache`` on an instance method
        keeps this DbtProject alive for the cache's lifetime; ``clear_caches``
        relies on the cache object, so it is left as-is.
        """
        try:
            if DBT_MAJOR_VER >= 1 and DBT_MINOR_VER >= 6:
                # 1.6+ added source_node to the signature.
                return self.dbt.resolve_ref(
                    source_node=None,
                    target_model_name=target_model_name,
                    target_model_version=None,
                    target_model_package=None,
                    current_project=self.config.project_name,
                    node_package=self.config.project_name,
                )
            if DBT_MAJOR_VER == 1 and DBT_MINOR_VER >= 5:
                # 1.5 introduced model versions.
                return self.dbt.resolve_ref(
                    target_model_name=target_model_name,
                    target_model_version=None,
                    target_model_package=None,
                    current_project=self.config.project_name,
                    node_package=self.config.project_name,
                )
            return self.dbt.resolve_ref(
                target_model_name=target_model_name,
                target_model_package=None,
                current_project=self.config.project_name,
                node_package=self.config.project_name,
            )
        except Exception as e:
            raise Exception(str(e))
|
|
528
|
+
|
|
529
|
+
@lru_cache(maxsize=10)
def get_source_node(
    self, target_source_name: str, target_table_name: str
) -> "ManifestNode":
    """Resolve a dbt source name + table name to its `"ManifestNode"`."""
    try:
        project = self.config.project_name
        return self.dbt.resolve_source(
            target_source_name=target_source_name,
            target_table_name=target_table_name,
            current_project=project,
            node_package=project,
        )
    except Exception as e:
        # normalize to a plain Exception, keeping only the message
        raise Exception(str(e))
|
|
543
|
+
|
|
544
|
+
def get_server_node(self, sql: str, node_name="name", original_node: Optional[Union["ManifestNode", str]] = None):
    """Get a node for SQL execution against adapter.

    Parses `sql` into a fresh server-side node named `node_name` (any
    stale node with that name is removed first). If `original_node` is
    given (node or model name) and resolves to an incremental model,
    its schema/database/alias and node_info are copied onto the new node
    so the SQL compiles/executes in the original node's context.
    """
    # Drop any previously registered node with this name before re-parsing.
    self._clear_node(node_name)
    sql_node = self.sql_parser.parse_remote(sql, node_name)
    # Enable copying original node properties
    if original_node is not None:
        # A string is treated as a model name and resolved via the manifest.
        if isinstance(original_node, str):
            original_node = self.get_ref_node(original_node)
        # Only incremental materializations need the original identity copied.
        if original_node is not None and isinstance(original_node.node_info, dict) and "materialized" in original_node.node_info.keys() and original_node.node_info["materialized"] == "incremental":
            sql_node.schema = original_node.schema
            sql_node.database = original_node.database
            sql_node.alias = original_node.alias
            sql_node.node_info["materialized"] = "incremental"
            # Merge remaining original node_info keys without overwriting
            # anything the parser already set on the new node.
            sql_node.node_info.update({k: v for k, v in original_node.node_info.items() if k not in sql_node.node_info.keys()})
    # Run dbt's standard post-parse processing (refs, sources, config).
    process_node(self.config, self.dbt, sql_node)
    return sql_node
|
|
560
|
+
|
|
561
|
+
@lru_cache(maxsize=100)
def get_macro_function(self, macro_name: str, compiled_code: Optional[str] = None) -> Callable[[Dict[str, Any]], Any]:
    """Get macro as a function which takes a dict via argument named `kwargs`,
    ie: `kwargs={"relation": ...}`

    make_schema_fn = get_macro_function('make_schema')\n
    make_schema_fn({'name': '__test_schema_1'})\n
    make_schema_fn({'name': '__test_schema_2'})"""
    # NOTE: tuple comparison so a future 2.x release still takes the modern
    # path; the previous `major >= 1 and minor >= 8` check would wrongly
    # route e.g. dbt 2.0 to the legacy branch.
    if (DBT_MAJOR_VER, DBT_MINOR_VER) >= (1, 8):
        # dbt >= 1.8: context_override replaces passing the manifest;
        # compiled_code (when given) is exposed to the macro context.
        model_context: Dict[str, Any] = {}
        if compiled_code is not None:
            model_context["compiled_code"] = compiled_code
        return partial(
            self.adapter.execute_macro,
            macro_name=macro_name,
            context_override=model_context,
        )
    # dbt < 1.8 expects the manifest to be passed explicitly.
    return partial(
        self.adapter.execute_macro, macro_name=macro_name, manifest=self.dbt
    )
|
|
580
|
+
|
|
581
|
+
def adapter_execute(
    self, sql: str, auto_begin: bool = True, fetch: bool = False
) -> Tuple["AdapterResponse", agate.Table]:
    """Thin passthrough to `adapter.execute` for running SQL on the warehouse."""
    response_and_table = self.adapter.execute(sql, auto_begin, fetch)
    return response_and_table
|
|
586
|
+
|
|
587
|
+
def execute_macro(
    self,
    macro: str,
    kwargs: Optional[Dict[str, Any]] = None,
    compiled_code: Optional[str] = None
) -> Any:
    """Run a named macro like a function, forwarding `kwargs` to it."""
    macro_fn = self.get_macro_function(macro, compiled_code)
    return macro_fn(kwargs=kwargs)
|
|
595
|
+
|
|
596
|
+
def execute_sql(self, raw_sql: str, original_node: Optional[Union["ManifestNode", str]] = None) -> DbtAdapterExecutionResult:
    """Compile (only when jinja is present) and execute a dbt SQL statement."""
    with self.adapter.connection_named("master"):
        # Without jinja the raw SQL is already its own compiled form.
        compiled_sql = (
            self._compile_sql(raw_sql, original_node).compiled_sql
            if has_jinja(raw_sql)
            else raw_sql
        )
        execution = self.adapter_execute(compiled_sql, fetch=True)
        return DbtAdapterExecutionResult(*execution, raw_sql, compiled_sql)
|
|
611
|
+
|
|
612
|
+
def execute_node(self, node: "ManifestNode") -> DbtAdapterExecutionResult:
    """Execute the SQL of a `"ManifestNode"`, compiling it first when needed."""
    try:
        if node is None:
            # NOTE: this ValueError is re-wrapped as a plain Exception below.
            raise ValueError("This model doesn't exist within this dbt project")
        source_sql: str = getattr(node, RAW_CODE)
        rendered_sql: Optional[str] = getattr(node, COMPILED_CODE, None)
        if rendered_sql:
            # Already compiled -- run it as-is.
            return self.execute_sql(rendered_sql)
        if has_jinja(source_sql):
            # Jinja present: compile the node before execution.
            rendered_sql = self._compile_node(node).compiled_sql
        return self.execute_sql(rendered_sql or source_sql)
    except Exception as e:
        raise Exception(str(e))
|
|
630
|
+
|
|
631
|
+
def compile_sql(self, raw_sql: str, original_node: Optional["ManifestNode"] = None) -> DbtAdapterCompilationResult:
    """Compile a raw SQL string under the "master" adapter connection."""
    try:
        with self.adapter.connection_named("master"):
            result = self._compile_sql(raw_sql, original_node)
        return result
    except Exception as e:
        raise Exception(str(e))
|
|
637
|
+
|
|
638
|
+
def compile_node(
    self, node: "ManifestNode"
) -> Optional[DbtAdapterCompilationResult]:
    """Compile an existing manifest node under the "master" connection."""
    try:
        if node is None:
            # Missing node is a caller error; wrapped as Exception below.
            raise ValueError("This model doesn't exist within this dbt project")
        with self.adapter.connection_named("master"):
            compiled = self._compile_node(node)
        return compiled
    except Exception as e:
        raise Exception(str(e))
|
|
648
|
+
|
|
649
|
+
def _compile_sql(self, raw_sql: str, original_node: Optional[Union["ManifestNode", str]] = None) -> DbtAdapterCompilationResult:
    """Wrap raw SQL in a throwaway server node, compile it, then clean up."""
    try:
        # Unique name so concurrent compilations never collide.
        throwaway_id = "t_{}".format(uuid.uuid4().hex)
        server_node = self.get_server_node(raw_sql, throwaway_id, original_node)
        compilation = self._compile_node(server_node)
        self._clear_node(throwaway_id)
        return compilation
    except Exception as e:
        raise Exception(str(e))
|
|
659
|
+
|
|
660
|
+
def _compile_node(
    self, node: Union["ManifestNode", "CompiledNode"]
) -> Optional[DbtAdapterCompilationResult]:
    """Compile an existing node and return its raw/compiled SQL pair.

    A copy of `node` is handed to the SQL compiler so the caller's node
    is not mutated. On dbt <= 1.3 an already-compiled node is returned
    unchanged; otherwise compilation always runs.
    """
    try:
        # Copy so compilation side effects don't leak into the input node.
        self.sql_compiler.node = copy(node)
        if DBT_MAJOR_VER == 1 and DBT_MINOR_VER <= 3:
            # Legacy dbt (<= 1.3): skip recompilation of CompiledNode inputs.
            compiled_node = (
                node
                if isinstance(node, CompiledNode)
                else self.sql_compiler.compile(self.dbt)
            )
        else:
            # this is essentially a convenient wrapper to adapter.get_compiler
            compiled_node = self.sql_compiler.compile(self.dbt)
        return DbtAdapterCompilationResult(
            getattr(compiled_node, RAW_CODE),
            getattr(compiled_node, COMPILED_CODE),
            compiled_node,
        )
    except Exception as e:
        raise Exception(str(e))
|
|
682
|
+
|
|
683
|
+
def _clear_node(self, name="name"):
    """Drop the transient SqlOperation node registered under `name`, if any."""
    if self.dbt is None:
        return
    # pop(..., None) makes removal a no-op when the node never existed.
    node_key = f"{NodeType.SqlOperation}.{self.project_name}.{name}"
    self.dbt.nodes.pop(node_key, None)
|
|
689
|
+
|
|
690
|
+
def get_relation(
    self, database: Optional[str], schema: Optional[str], name: Optional[str]
) -> Optional["BaseRelation"]:
    """Look up an existing relation through the adapter (None when absent)."""
    adapter = self.adapter
    return adapter.get_relation(database, schema, name)
|
|
695
|
+
|
|
696
|
+
def create_relation(
    self, database: Optional[str], schema: Optional[str], name: Optional[str]
) -> "BaseRelation":
    """Construct a new relation object via `adapter.Relation.create`."""
    relation_cls = self.adapter.Relation
    return relation_cls.create(database, schema, name)
|
|
701
|
+
|
|
702
|
+
def create_relation_from_node(self, node: "ManifestNode") -> "BaseRelation":
    """Build the relation implied by `node` using the project config."""
    relation_factory = self.adapter.Relation
    return relation_factory.create_from(self.config, node)
|
|
705
|
+
|
|
706
|
+
def get_columns_in_relation(self, relation: "BaseRelation") -> List[str]:
    """Fetch column metadata for `relation` under the master connection."""
    try:
        with self.adapter.connection_named("master"):
            columns = self.adapter.get_columns_in_relation(relation)
        return columns
    except Exception as e:
        raise Exception(str(e))
|
|
713
|
+
|
|
714
|
+
@lru_cache(maxsize=5)
def get_columns(self, node: "ManifestNode") -> List[str]:
    """Get the column names of a node's relation.

    Primary path queries the adapter's column metadata. If that fails
    (e.g. the relation does not exist yet), falls back to executing the
    node's SQL wrapped in a `limit 0` query and reading the result
    table's column names. The fallback temporarily rewrites the node's
    raw code and restores it afterwards.
    """
    columns = []
    try:
        columns.extend(
            [
                c.name
                for c in self.get_columns_in_relation(
                    self.create_relation_from_node(node)
                )
            ]
        )
    except Exception:
        # Fallback: run the SQL with zero rows to discover columns.
        original_sql = str(getattr(node, RAW_CODE))
        # TODO: account for `TOP` syntax
        setattr(node, RAW_CODE, f"select * from ({original_sql}) limit 0")
        result = self.execute_node(node)
        # Restore the node exactly as it was before the probe.
        setattr(node, RAW_CODE, original_sql)
        # NOTE(review): delattr assumes execute_node left COMPILED_CODE set
        # on the node; if it didn't, this raises AttributeError -- confirm.
        delattr(node, COMPILED_CODE)
        columns.extend(result.table.column_names)
    return columns
|
|
736
|
+
|
|
737
|
+
def get_catalog(self) -> List[Dict[str, Any]]:
    """Build the warehouse catalog as a list of plain dict rows.

    Returns one dict per catalog row, keyed by the catalog table's
    column names, with Decimal values coerced to primitives.

    Raises:
        Exception: wrapping any adapter error, or the per-relation
            exceptions reported by `adapter.get_catalog`.
    """
    # NOTE: annotation corrected -- this has always returned a list of
    # row dicts, not a single dict. The dead pre-initializers for
    # catalog_table/catalog_data/exceptions were removed: every branch
    # either assigns them before use or raises.
    try:
        with self.adapter.connection_named("generate_catalog"):
            catalog_table, exceptions = self.adapter.get_catalog(self.dbt)

        # Fail loudly on partial failures rather than return a partial catalog.
        if exceptions:
            raise Exception(str(exceptions))

        return [
            dict(
                zip(catalog_table.column_names, map(dbt.utils._coerce_decimal, row))
            )
            for row in catalog_table
        ]
    except Exception as e:
        raise Exception(str(e)) from e
|
|
759
|
+
|
|
760
|
+
def get_or_create_relation(
    self, database: str, schema: str, name: str
) -> Tuple["BaseRelation", bool]:
    """Fetch a relation, or build a fresh relation object when absent.

    Returns (relation, did_exist)."""
    existing = self.get_relation(database, schema, name)
    if existing:
        return existing, True
    return self.create_relation(database, schema, name), False
|
|
771
|
+
|
|
772
|
+
def create_schema(self, node: "ManifestNode"):
    """Create the schema implied by `node` via the `create_schema` macro."""
    target_relation = self.create_relation_from_node(node)
    return self.execute_macro(
        "create_schema",
        kwargs={"relation": target_relation},
    )
|
|
778
|
+
|
|
779
|
+
def materialize(
    self, node: "ManifestNode", temporary: bool = True
) -> Tuple["AdapterResponse", None]:
    """Materialize `node` as a table by executing its CTAS statement."""
    # `create_table_as` renders a CTAS string, which is then executed.
    ctas_sql = self.execute_macro(
        "create_table_as",
        kwargs={
            "sql": getattr(node, COMPILED_CODE),
            "relation": self.create_relation_from_node(node),
            "temporary": temporary,
        },
    )
    return self.adapter_execute(ctas_sql, auto_begin=True)
|
|
795
|
+
|
|
796
|
+
def get_dbt_version(self):
    """Return the detected dbt version as [major, minor, patch]."""
    version_parts = [DBT_MAJOR_VER, DBT_MINOR_VER, DBT_PATCH_VER]
    return version_parts
|
|
798
|
+
|
|
799
|
+
def validate_sql_dry_run(self, compiled_sql: str):
    """Dry-run validate compiled SQL via `adapter.validate_sql`.

    Returns None on dbt versions older than 1.6, where the adapter
    does not expose `validate_sql`.

    Raises:
        Exception: wrapping any validation error from the adapter.
    """
    # Compare (major, minor) as a tuple: the previous
    # `if DBT_MAJOR_VER < 1 ... if DBT_MINOR_VER < 6` pair would wrongly
    # skip validation on e.g. dbt 2.0 (minor 0 < 6).
    if (DBT_MAJOR_VER, DBT_MINOR_VER) < (1, 6):
        return None
    try:
        return self.adapter.validate_sql(compiled_sql)
    except Exception as e:
        raise Exception(str(e)) from e
|
|
808
|
+
|
|
809
|
+
def get_target_names(self):
    """List the target names defined for this profile in profiles.yml."""
    from dbt.config.profile import read_profile

    raw_profiles = read_profile(self.args.profiles_dir)
    profile_entry = raw_profiles[self.config.profile_name]
    if "outputs" not in profile_entry:
        return []
    return profile_entry["outputs"].keys()
|
|
817
|
+
|
|
818
|
+
def set_selected_target(self, target: str):
    """Point subsequent dbt invocations at `target` (no re-init performed)."""
    run_args = self.args
    run_args.target = target
|
|
820
|
+
|
|
821
|
+
def cleanup_connections(self):
    """Tear down any open adapter connections."""
    try:
        release_all = self.adapter.cleanup_connections
        release_all()
    except Exception as e:
        # normalize adapter-specific errors to a plain Exception
        raise Exception(str(e))
|