dbt-adapters 1.22.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dbt/adapters/__about__.py +1 -0
- dbt/adapters/__init__.py +8 -0
- dbt/adapters/base/README.md +13 -0
- dbt/adapters/base/__init__.py +16 -0
- dbt/adapters/base/column.py +173 -0
- dbt/adapters/base/connections.py +429 -0
- dbt/adapters/base/impl.py +2036 -0
- dbt/adapters/base/meta.py +150 -0
- dbt/adapters/base/plugin.py +32 -0
- dbt/adapters/base/query_headers.py +106 -0
- dbt/adapters/base/relation.py +648 -0
- dbt/adapters/cache.py +521 -0
- dbt/adapters/capability.py +63 -0
- dbt/adapters/catalogs/__init__.py +14 -0
- dbt/adapters/catalogs/_client.py +54 -0
- dbt/adapters/catalogs/_constants.py +1 -0
- dbt/adapters/catalogs/_exceptions.py +39 -0
- dbt/adapters/catalogs/_integration.py +113 -0
- dbt/adapters/clients/__init__.py +0 -0
- dbt/adapters/clients/jinja.py +24 -0
- dbt/adapters/contracts/__init__.py +0 -0
- dbt/adapters/contracts/connection.py +229 -0
- dbt/adapters/contracts/macros.py +11 -0
- dbt/adapters/contracts/relation.py +160 -0
- dbt/adapters/events/README.md +51 -0
- dbt/adapters/events/__init__.py +0 -0
- dbt/adapters/events/adapter_types_pb2.py +2 -0
- dbt/adapters/events/base_types.py +36 -0
- dbt/adapters/events/logging.py +83 -0
- dbt/adapters/events/types.py +436 -0
- dbt/adapters/exceptions/__init__.py +40 -0
- dbt/adapters/exceptions/alias.py +24 -0
- dbt/adapters/exceptions/cache.py +68 -0
- dbt/adapters/exceptions/compilation.py +269 -0
- dbt/adapters/exceptions/connection.py +16 -0
- dbt/adapters/exceptions/database.py +51 -0
- dbt/adapters/factory.py +264 -0
- dbt/adapters/protocol.py +150 -0
- dbt/adapters/py.typed +0 -0
- dbt/adapters/record/__init__.py +2 -0
- dbt/adapters/record/base.py +291 -0
- dbt/adapters/record/cursor/cursor.py +69 -0
- dbt/adapters/record/cursor/description.py +37 -0
- dbt/adapters/record/cursor/execute.py +39 -0
- dbt/adapters/record/cursor/fetchall.py +69 -0
- dbt/adapters/record/cursor/fetchmany.py +23 -0
- dbt/adapters/record/cursor/fetchone.py +23 -0
- dbt/adapters/record/cursor/rowcount.py +23 -0
- dbt/adapters/record/handle.py +55 -0
- dbt/adapters/record/serialization.py +115 -0
- dbt/adapters/reference_keys.py +39 -0
- dbt/adapters/relation_configs/README.md +25 -0
- dbt/adapters/relation_configs/__init__.py +12 -0
- dbt/adapters/relation_configs/config_base.py +46 -0
- dbt/adapters/relation_configs/config_change.py +26 -0
- dbt/adapters/relation_configs/config_validation.py +57 -0
- dbt/adapters/sql/__init__.py +2 -0
- dbt/adapters/sql/connections.py +263 -0
- dbt/adapters/sql/impl.py +286 -0
- dbt/adapters/utils.py +69 -0
- dbt/include/__init__.py +3 -0
- dbt/include/global_project/__init__.py +4 -0
- dbt/include/global_project/dbt_project.yml +7 -0
- dbt/include/global_project/docs/overview.md +43 -0
- dbt/include/global_project/macros/adapters/apply_grants.sql +167 -0
- dbt/include/global_project/macros/adapters/columns.sql +144 -0
- dbt/include/global_project/macros/adapters/freshness.sql +32 -0
- dbt/include/global_project/macros/adapters/indexes.sql +41 -0
- dbt/include/global_project/macros/adapters/metadata.sql +105 -0
- dbt/include/global_project/macros/adapters/persist_docs.sql +33 -0
- dbt/include/global_project/macros/adapters/relation.sql +84 -0
- dbt/include/global_project/macros/adapters/schema.sql +20 -0
- dbt/include/global_project/macros/adapters/show.sql +26 -0
- dbt/include/global_project/macros/adapters/timestamps.sql +52 -0
- dbt/include/global_project/macros/adapters/validate_sql.sql +10 -0
- dbt/include/global_project/macros/etc/datetime.sql +62 -0
- dbt/include/global_project/macros/etc/statement.sql +52 -0
- dbt/include/global_project/macros/generic_test_sql/accepted_values.sql +27 -0
- dbt/include/global_project/macros/generic_test_sql/not_null.sql +9 -0
- dbt/include/global_project/macros/generic_test_sql/relationships.sql +23 -0
- dbt/include/global_project/macros/generic_test_sql/unique.sql +12 -0
- dbt/include/global_project/macros/get_custom_name/get_custom_alias.sql +36 -0
- dbt/include/global_project/macros/get_custom_name/get_custom_database.sql +32 -0
- dbt/include/global_project/macros/get_custom_name/get_custom_schema.sql +60 -0
- dbt/include/global_project/macros/materializations/configs.sql +21 -0
- dbt/include/global_project/macros/materializations/functions/aggregate.sql +65 -0
- dbt/include/global_project/macros/materializations/functions/function.sql +20 -0
- dbt/include/global_project/macros/materializations/functions/helpers.sql +20 -0
- dbt/include/global_project/macros/materializations/functions/scalar.sql +69 -0
- dbt/include/global_project/macros/materializations/hooks.sql +35 -0
- dbt/include/global_project/macros/materializations/models/clone/can_clone_table.sql +7 -0
- dbt/include/global_project/macros/materializations/models/clone/clone.sql +67 -0
- dbt/include/global_project/macros/materializations/models/clone/create_or_replace_clone.sql +7 -0
- dbt/include/global_project/macros/materializations/models/incremental/column_helpers.sql +80 -0
- dbt/include/global_project/macros/materializations/models/incremental/incremental.sql +99 -0
- dbt/include/global_project/macros/materializations/models/incremental/is_incremental.sql +13 -0
- dbt/include/global_project/macros/materializations/models/incremental/merge.sql +120 -0
- dbt/include/global_project/macros/materializations/models/incremental/on_schema_change.sql +159 -0
- dbt/include/global_project/macros/materializations/models/incremental/strategies.sql +92 -0
- dbt/include/global_project/macros/materializations/models/materialized_view.sql +121 -0
- dbt/include/global_project/macros/materializations/models/table.sql +64 -0
- dbt/include/global_project/macros/materializations/models/view.sql +72 -0
- dbt/include/global_project/macros/materializations/seeds/helpers.sql +128 -0
- dbt/include/global_project/macros/materializations/seeds/seed.sql +60 -0
- dbt/include/global_project/macros/materializations/snapshots/helpers.sql +345 -0
- dbt/include/global_project/macros/materializations/snapshots/snapshot.sql +109 -0
- dbt/include/global_project/macros/materializations/snapshots/snapshot_merge.sql +34 -0
- dbt/include/global_project/macros/materializations/snapshots/strategies.sql +184 -0
- dbt/include/global_project/macros/materializations/tests/helpers.sql +44 -0
- dbt/include/global_project/macros/materializations/tests/test.sql +66 -0
- dbt/include/global_project/macros/materializations/tests/unit.sql +40 -0
- dbt/include/global_project/macros/materializations/tests/where_subquery.sql +15 -0
- dbt/include/global_project/macros/python_model/python.sql +114 -0
- dbt/include/global_project/macros/relations/column/columns_spec_ddl.sql +89 -0
- dbt/include/global_project/macros/relations/create.sql +23 -0
- dbt/include/global_project/macros/relations/create_backup.sql +17 -0
- dbt/include/global_project/macros/relations/create_intermediate.sql +17 -0
- dbt/include/global_project/macros/relations/drop.sql +41 -0
- dbt/include/global_project/macros/relations/drop_backup.sql +14 -0
- dbt/include/global_project/macros/relations/materialized_view/alter.sql +55 -0
- dbt/include/global_project/macros/relations/materialized_view/create.sql +10 -0
- dbt/include/global_project/macros/relations/materialized_view/drop.sql +14 -0
- dbt/include/global_project/macros/relations/materialized_view/refresh.sql +9 -0
- dbt/include/global_project/macros/relations/materialized_view/rename.sql +10 -0
- dbt/include/global_project/macros/relations/materialized_view/replace.sql +10 -0
- dbt/include/global_project/macros/relations/rename.sql +35 -0
- dbt/include/global_project/macros/relations/rename_intermediate.sql +14 -0
- dbt/include/global_project/macros/relations/replace.sql +50 -0
- dbt/include/global_project/macros/relations/schema.sql +8 -0
- dbt/include/global_project/macros/relations/table/create.sql +60 -0
- dbt/include/global_project/macros/relations/table/drop.sql +14 -0
- dbt/include/global_project/macros/relations/table/rename.sql +10 -0
- dbt/include/global_project/macros/relations/table/replace.sql +10 -0
- dbt/include/global_project/macros/relations/view/create.sql +27 -0
- dbt/include/global_project/macros/relations/view/drop.sql +14 -0
- dbt/include/global_project/macros/relations/view/rename.sql +10 -0
- dbt/include/global_project/macros/relations/view/replace.sql +66 -0
- dbt/include/global_project/macros/unit_test_sql/get_fixture_sql.sql +107 -0
- dbt/include/global_project/macros/utils/any_value.sql +9 -0
- dbt/include/global_project/macros/utils/array_append.sql +8 -0
- dbt/include/global_project/macros/utils/array_concat.sql +7 -0
- dbt/include/global_project/macros/utils/array_construct.sql +12 -0
- dbt/include/global_project/macros/utils/bool_or.sql +9 -0
- dbt/include/global_project/macros/utils/cast.sql +7 -0
- dbt/include/global_project/macros/utils/cast_bool_to_text.sql +7 -0
- dbt/include/global_project/macros/utils/concat.sql +7 -0
- dbt/include/global_project/macros/utils/data_types.sql +129 -0
- dbt/include/global_project/macros/utils/date.sql +10 -0
- dbt/include/global_project/macros/utils/date_spine.sql +75 -0
- dbt/include/global_project/macros/utils/date_trunc.sql +7 -0
- dbt/include/global_project/macros/utils/dateadd.sql +14 -0
- dbt/include/global_project/macros/utils/datediff.sql +14 -0
- dbt/include/global_project/macros/utils/equals.sql +14 -0
- dbt/include/global_project/macros/utils/escape_single_quotes.sql +8 -0
- dbt/include/global_project/macros/utils/except.sql +9 -0
- dbt/include/global_project/macros/utils/generate_series.sql +53 -0
- dbt/include/global_project/macros/utils/hash.sql +7 -0
- dbt/include/global_project/macros/utils/intersect.sql +9 -0
- dbt/include/global_project/macros/utils/last_day.sql +15 -0
- dbt/include/global_project/macros/utils/length.sql +11 -0
- dbt/include/global_project/macros/utils/listagg.sql +30 -0
- dbt/include/global_project/macros/utils/literal.sql +7 -0
- dbt/include/global_project/macros/utils/position.sql +11 -0
- dbt/include/global_project/macros/utils/replace.sql +14 -0
- dbt/include/global_project/macros/utils/right.sql +12 -0
- dbt/include/global_project/macros/utils/safe_cast.sql +9 -0
- dbt/include/global_project/macros/utils/split_part.sql +26 -0
- dbt/include/global_project/tests/generic/builtin.sql +30 -0
- dbt/include/py.typed +0 -0
- dbt_adapters-1.22.2.dist-info/METADATA +124 -0
- dbt_adapters-1.22.2.dist-info/RECORD +173 -0
- dbt_adapters-1.22.2.dist-info/WHEEL +4 -0
- dbt_adapters-1.22.2.dist-info/licenses/LICENSE +201 -0
dbt/adapters/cache.py
ADDED
|
@@ -0,0 +1,521 @@
|
|
|
1
|
+
from copy import deepcopy
|
|
2
|
+
import threading
|
|
3
|
+
from typing import Any, Dict, Iterable, List, Optional, Set, Tuple
|
|
4
|
+
|
|
5
|
+
from dbt_common.events.functions import fire_event, fire_event_if
|
|
6
|
+
from dbt_common.utils.formatting import lowercase
|
|
7
|
+
|
|
8
|
+
from dbt.adapters.events.types import CacheAction, CacheDumpGraph
|
|
9
|
+
from dbt.adapters.exceptions.cache import (
|
|
10
|
+
DependentLinkNotCachedError,
|
|
11
|
+
NewNameAlreadyInCacheError,
|
|
12
|
+
NoneRelationFoundError,
|
|
13
|
+
ReferencedLinkNotCachedError,
|
|
14
|
+
TruncatedModelNameCausedCollisionError,
|
|
15
|
+
)
|
|
16
|
+
from dbt.adapters.reference_keys import (
|
|
17
|
+
_ReferenceKey,
|
|
18
|
+
_make_ref_key,
|
|
19
|
+
_make_ref_key_dict,
|
|
20
|
+
)
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def dot_separated(key: _ReferenceKey) -> str:
    """Render *key* as a single dot-joined string (e.g. ``db.schema.table``).

    :param _ReferenceKey key: The key to stringify.
    """
    return ".".join(str(part) for part in key)
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
class _CachedRelation:
    """Nothing about _CachedRelation is guaranteed to be thread-safe!

    :attr str schema: The schema of this relation.
    :attr str identifier: The identifier of this relation.
    :attr Dict[_ReferenceKey, _CachedRelation] referenced_by: The relations
        that refer to this relation.
    :attr BaseRelation inner: The underlying dbt relation.
    """

    def __init__(self, inner) -> None:
        self.referenced_by: Dict[_ReferenceKey, _CachedRelation] = {}
        self.inner = inner

    def __str__(self) -> str:
        return ("_CachedRelation(database={}, schema={}, identifier={}, inner={})").format(
            self.database, self.schema, self.identifier, self.inner
        )

    @property
    def database(self) -> Optional[str]:
        # Names are lowercased so the cache compares case-insensitively.
        return lowercase(self.inner.database)

    @property
    def schema(self) -> Optional[str]:
        return lowercase(self.inner.schema)

    @property
    def identifier(self) -> Optional[str]:
        return lowercase(self.inner.identifier)

    def __copy__(self):
        new = self.__class__(self.inner)
        new.__dict__.update(self.__dict__)
        return new

    def __deepcopy__(self, memo):
        new = self.__class__(self.inner.incorporate())
        new.__dict__.update(self.__dict__)
        new.referenced_by = deepcopy(self.referenced_by, memo)
        # Fix: the original implementation never returned `new`, so
        # copy.deepcopy() on a _CachedRelation silently produced None.
        return new

    def is_referenced_by(self, key):
        """Return True if the relation identified by `key` refers to this one."""
        return key in self.referenced_by

    def key(self):
        """Get the _ReferenceKey that represents this relation

        :return _ReferenceKey: A key for this relation.
        """
        return _make_ref_key(self)

    def add_reference(self, referrer: "_CachedRelation"):
        """Add a reference from referrer to self, indicating that if this node
        were drop...cascaded, the referrer would be dropped as well.

        :param _CachedRelation referrer: The node that refers to this node.
        """
        self.referenced_by[referrer.key()] = referrer

    def collect_consequences(self):
        """Recursively collect a set of _ReferenceKeys that would
        consequentially get dropped if this were dropped via
        "drop ... cascade".

        :return Set[_ReferenceKey]: All the relations that would be dropped
        """
        consequences = {self.key()}
        for relation in self.referenced_by.values():
            consequences.update(relation.collect_consequences())
        return consequences

    def release_references(self, keys):
        """Non-recursively indicate that an iterable of _ReferenceKey no longer
        exist. Unknown keys are ignored.

        :param Iterable[_ReferenceKey] keys: The keys to drop.
        """
        # Intersect first so we only pop keys we actually hold.
        keys = set(self.referenced_by) & set(keys)
        for key in keys:
            self.referenced_by.pop(key)

    def rename(self, new_relation):
        """Rename this cached relation to new_relation.
        Note that this will change the output of key(), all refs must be
        updated!

        :param _CachedRelation new_relation: The new name to apply to the
            relation
        """
        # Relations store this stuff inside their `path` dict. But they
        # also store a table_name, and usually use it in their .render(),
        # so we need to update that as well. It doesn't appear that
        # table_name is ever anything but the identifier (via .create())
        self.inner = self.inner.incorporate(
            path={
                "database": new_relation.inner.database,
                "schema": new_relation.inner.schema,
                "identifier": new_relation.inner.identifier,
            },
        )

    def rename_key(self, old_key, new_key):
        """Rename a reference that may or may not exist. Only handles the
        reference itself, so this is the other half of what `rename` does.

        If old_key is not in referenced_by, this is a no-op.

        :param _ReferenceKey old_key: The old key to be renamed.
        :param _ReferenceKey new_key: The new key to rename to.
        :raises InternalError: If the new key already exists.
        """
        if new_key in self.referenced_by:
            raise NewNameAlreadyInCacheError(old_key, new_key)

        if old_key not in self.referenced_by:
            return
        value = self.referenced_by.pop(old_key)
        self.referenced_by[new_key] = value

    def dump_graph_entry(self):
        """Return a key/value pair representing this key and its referents.

        return List[str]: The dot-separated form of all referent keys.
        """
        return [dot_separated(r) for r in self.referenced_by]
|
|
156
|
+
|
|
157
|
+
|
|
158
|
+
class RelationsCache:
|
|
159
|
+
"""A cache of the relations known to dbt. Keeps track of relationships
|
|
160
|
+
declared between tables and handles renames/drops as a real database would.
|
|
161
|
+
|
|
162
|
+
:attr Dict[_ReferenceKey, _CachedRelation] relations: The known relations.
|
|
163
|
+
:attr threading.RLock lock: The lock around relations, held during updates.
|
|
164
|
+
The adapters also hold this lock while filling the cache.
|
|
165
|
+
:attr Set[str] schemas: The set of known/cached schemas, all lowercased.
|
|
166
|
+
"""
|
|
167
|
+
|
|
168
|
+
def __init__(self, log_cache_events: bool = False) -> None:
|
|
169
|
+
self.relations: Dict[_ReferenceKey, _CachedRelation] = {}
|
|
170
|
+
self.lock = threading.RLock()
|
|
171
|
+
self.schemas: Set[Tuple[Optional[str], Optional[str]]] = set()
|
|
172
|
+
self.log_cache_events = log_cache_events
|
|
173
|
+
|
|
174
|
+
def add_schema(
|
|
175
|
+
self,
|
|
176
|
+
database: Optional[str],
|
|
177
|
+
schema: Optional[str],
|
|
178
|
+
) -> None:
|
|
179
|
+
"""Add a schema to the set of known schemas (case-insensitive)
|
|
180
|
+
|
|
181
|
+
:param database: The database name to add.
|
|
182
|
+
:param schema: The schema name to add.
|
|
183
|
+
"""
|
|
184
|
+
self.schemas.add((lowercase(database), lowercase(schema)))
|
|
185
|
+
|
|
186
|
+
def drop_schema(
|
|
187
|
+
self,
|
|
188
|
+
database: Optional[str],
|
|
189
|
+
schema: Optional[str],
|
|
190
|
+
) -> None:
|
|
191
|
+
"""Drop the given schema and remove it from the set of known schemas.
|
|
192
|
+
|
|
193
|
+
Then remove all its contents (and their dependents, etc) as well.
|
|
194
|
+
"""
|
|
195
|
+
key = (lowercase(database), lowercase(schema))
|
|
196
|
+
if key not in self.schemas:
|
|
197
|
+
return
|
|
198
|
+
|
|
199
|
+
# avoid iterating over self.relations while removing things by
|
|
200
|
+
# collecting the list first.
|
|
201
|
+
|
|
202
|
+
with self.lock:
|
|
203
|
+
to_remove = self._list_relations_in_schema(database, schema)
|
|
204
|
+
self._remove_all(to_remove)
|
|
205
|
+
# handle a drop_schema race by using discard() over remove()
|
|
206
|
+
self.schemas.discard(key)
|
|
207
|
+
|
|
208
|
+
def update_schemas(self, schemas: Iterable[Tuple[Optional[str], str]]):
|
|
209
|
+
"""Add multiple schemas to the set of known schemas (case-insensitive)
|
|
210
|
+
|
|
211
|
+
:param schemas: An iterable of the schema names to add.
|
|
212
|
+
"""
|
|
213
|
+
self.schemas.update((lowercase(d), s.lower()) for (d, s) in schemas)
|
|
214
|
+
|
|
215
|
+
def __contains__(self, schema_id: Tuple[Optional[str], str]):
|
|
216
|
+
"""A schema is 'in' the relations cache if it is in the set of cached
|
|
217
|
+
schemas.
|
|
218
|
+
|
|
219
|
+
:param schema_id: The db name and schema name to look up.
|
|
220
|
+
"""
|
|
221
|
+
db, schema = schema_id
|
|
222
|
+
return (lowercase(db), schema.lower()) in self.schemas
|
|
223
|
+
|
|
224
|
+
def dump_graph(self):
|
|
225
|
+
"""Dump a key-only representation of the schema to a dictionary. Every
|
|
226
|
+
known relation is a key with a value of a list of keys it is referenced
|
|
227
|
+
by.
|
|
228
|
+
"""
|
|
229
|
+
# we have to hold the lock for the entire dump, if other threads modify
|
|
230
|
+
# self.relations or any cache entry's referenced_by during iteration
|
|
231
|
+
# it's a runtime error!
|
|
232
|
+
with self.lock:
|
|
233
|
+
return {dot_separated(k): str(v.dump_graph_entry()) for k, v in self.relations.items()}
|
|
234
|
+
|
|
235
|
+
def _setdefault(self, relation: _CachedRelation):
|
|
236
|
+
"""Add a relation to the cache, or return it if it already exists.
|
|
237
|
+
|
|
238
|
+
:param _CachedRelation relation: The relation to set or get.
|
|
239
|
+
:return _CachedRelation: The relation stored under the given relation's
|
|
240
|
+
key
|
|
241
|
+
"""
|
|
242
|
+
self.add_schema(relation.database, relation.schema)
|
|
243
|
+
key = relation.key()
|
|
244
|
+
return self.relations.setdefault(key, relation)
|
|
245
|
+
|
|
246
|
+
def _add_link(self, referenced_key, dependent_key):
|
|
247
|
+
"""Add a link between two relations to the database. Both the old and
|
|
248
|
+
new entries must alraedy exist in the database.
|
|
249
|
+
|
|
250
|
+
:param _ReferenceKey referenced_key: The key identifying the referenced
|
|
251
|
+
model (the one that if dropped will drop the dependent model).
|
|
252
|
+
:param _ReferenceKey dependent_key: The key identifying the dependent
|
|
253
|
+
model.
|
|
254
|
+
:raises InternalError: If either entry does not exist.
|
|
255
|
+
"""
|
|
256
|
+
referenced = self.relations.get(referenced_key)
|
|
257
|
+
if referenced is None:
|
|
258
|
+
return
|
|
259
|
+
if referenced is None:
|
|
260
|
+
raise ReferencedLinkNotCachedError(referenced_key)
|
|
261
|
+
|
|
262
|
+
dependent = self.relations.get(dependent_key)
|
|
263
|
+
if dependent is None:
|
|
264
|
+
raise DependentLinkNotCachedError(dependent_key)
|
|
265
|
+
|
|
266
|
+
assert dependent is not None # we just raised!
|
|
267
|
+
|
|
268
|
+
referenced.add_reference(dependent)
|
|
269
|
+
|
|
270
|
+
# This is called in plugins/postgres/dbt/adapters/postgres/impl.py
|
|
271
|
+
def add_link(self, referenced, dependent):
|
|
272
|
+
"""Add a link between two relations to the database. If either relation
|
|
273
|
+
does not exist, it will be added as an "external" relation.
|
|
274
|
+
|
|
275
|
+
The dependent model refers _to_ the referenced model. So, given
|
|
276
|
+
arguments of (jake_test, bar, jake_test, foo):
|
|
277
|
+
both values are in the schema jake_test and foo is a view that refers
|
|
278
|
+
to bar, so "drop bar cascade" will drop foo and all of foo's
|
|
279
|
+
dependents.
|
|
280
|
+
|
|
281
|
+
:param BaseRelation referenced: The referenced model.
|
|
282
|
+
:param BaseRelation dependent: The dependent model.
|
|
283
|
+
:raises InternalError: If either entry does not exist.
|
|
284
|
+
"""
|
|
285
|
+
ref_key = _make_ref_key(referenced)
|
|
286
|
+
dep_key = _make_ref_key(dependent)
|
|
287
|
+
if (ref_key.database, ref_key.schema) not in self:
|
|
288
|
+
# if we have not cached the referenced schema at all, we must be
|
|
289
|
+
# referring to a table outside our control. There's no need to make
|
|
290
|
+
# a link - we will never drop the referenced relation during a run.
|
|
291
|
+
fire_event(
|
|
292
|
+
CacheAction(
|
|
293
|
+
ref_key=ref_key._asdict(),
|
|
294
|
+
ref_key_2=dep_key._asdict(),
|
|
295
|
+
)
|
|
296
|
+
)
|
|
297
|
+
return
|
|
298
|
+
if ref_key not in self.relations:
|
|
299
|
+
# Insert a dummy "external" relation.
|
|
300
|
+
referenced = referenced.replace(type=referenced.External)
|
|
301
|
+
self.add(referenced)
|
|
302
|
+
if dep_key not in self.relations:
|
|
303
|
+
# Insert a dummy "external" relation.
|
|
304
|
+
dependent = dependent.replace(type=referenced.External)
|
|
305
|
+
self.add(dependent)
|
|
306
|
+
fire_event(
|
|
307
|
+
CacheAction(
|
|
308
|
+
action="add_link",
|
|
309
|
+
ref_key=dep_key._asdict(),
|
|
310
|
+
ref_key_2=ref_key._asdict(),
|
|
311
|
+
)
|
|
312
|
+
)
|
|
313
|
+
with self.lock:
|
|
314
|
+
self._add_link(ref_key, dep_key)
|
|
315
|
+
|
|
316
|
+
def add(self, relation):
|
|
317
|
+
"""Add the relation inner to the cache, under the schema schema and
|
|
318
|
+
identifier identifier
|
|
319
|
+
|
|
320
|
+
:param BaseRelation relation: The underlying relation.
|
|
321
|
+
"""
|
|
322
|
+
cached = _CachedRelation(relation)
|
|
323
|
+
fire_event_if(
|
|
324
|
+
self.log_cache_events,
|
|
325
|
+
lambda: CacheDumpGraph(before_after="before", action="adding", dump=self.dump_graph()),
|
|
326
|
+
)
|
|
327
|
+
fire_event(CacheAction(action="add_relation", ref_key=_make_ref_key_dict(cached)))
|
|
328
|
+
|
|
329
|
+
with self.lock:
|
|
330
|
+
self._setdefault(cached)
|
|
331
|
+
fire_event_if(
|
|
332
|
+
self.log_cache_events,
|
|
333
|
+
lambda: CacheDumpGraph(before_after="after", action="adding", dump=self.dump_graph()),
|
|
334
|
+
)
|
|
335
|
+
|
|
336
|
+
def _remove_refs(self, keys):
|
|
337
|
+
"""Removes all references to all entries in keys. This does not
|
|
338
|
+
cascade!
|
|
339
|
+
|
|
340
|
+
:param Iterable[_ReferenceKey] keys: The keys to remove.
|
|
341
|
+
"""
|
|
342
|
+
# remove direct refs
|
|
343
|
+
for key in keys:
|
|
344
|
+
del self.relations[key]
|
|
345
|
+
# then remove all entries from each child
|
|
346
|
+
for cached in self.relations.values():
|
|
347
|
+
cached.release_references(keys)
|
|
348
|
+
|
|
349
|
+
def drop(self, relation):
|
|
350
|
+
"""Drop the named relation and cascade it appropriately to all
|
|
351
|
+
dependent relations.
|
|
352
|
+
|
|
353
|
+
Because dbt proactively does many `drop relation if exist ... cascade`
|
|
354
|
+
that are noops, nonexistent relation drops cause a debug log and no
|
|
355
|
+
other actions.
|
|
356
|
+
|
|
357
|
+
:param str schema: The schema of the relation to drop.
|
|
358
|
+
:param str identifier: The identifier of the relation to drop.
|
|
359
|
+
"""
|
|
360
|
+
dropped_key = _make_ref_key(relation)
|
|
361
|
+
dropped_key_msg = _make_ref_key_dict(relation)
|
|
362
|
+
fire_event(CacheAction(action="drop_relation", ref_key=dropped_key_msg))
|
|
363
|
+
with self.lock:
|
|
364
|
+
if dropped_key not in self.relations:
|
|
365
|
+
fire_event(CacheAction(action="drop_missing_relation", ref_key=dropped_key_msg))
|
|
366
|
+
return
|
|
367
|
+
consequences = self.relations[dropped_key].collect_consequences()
|
|
368
|
+
# convert from a list of _ReferenceKeys to a list of ReferenceKeyMsgs
|
|
369
|
+
consequence_msgs = [key._asdict() for key in consequences]
|
|
370
|
+
fire_event(
|
|
371
|
+
CacheAction(
|
|
372
|
+
action="drop_cascade",
|
|
373
|
+
ref_key=dropped_key_msg,
|
|
374
|
+
ref_list=consequence_msgs,
|
|
375
|
+
)
|
|
376
|
+
)
|
|
377
|
+
self._remove_refs(consequences)
|
|
378
|
+
|
|
379
|
+
def _rename_relation(self, old_key, new_relation):
|
|
380
|
+
"""Rename a relation named old_key to new_key, updating references.
|
|
381
|
+
Return whether or not there was a key to rename.
|
|
382
|
+
|
|
383
|
+
:param _ReferenceKey old_key: The existing key, to rename from.
|
|
384
|
+
:param _CachedRelation new_key: The new relation, to rename to.
|
|
385
|
+
"""
|
|
386
|
+
# On the database level, a rename updates all values that were
|
|
387
|
+
# previously referenced by old_name to be referenced by new_name.
|
|
388
|
+
# basically, the name changes but some underlying ID moves. Kind of
|
|
389
|
+
# like an object reference!
|
|
390
|
+
relation = self.relations.pop(old_key)
|
|
391
|
+
new_key = new_relation.key()
|
|
392
|
+
|
|
393
|
+
# relation has to rename its innards, so it needs the _CachedRelation.
|
|
394
|
+
relation.rename(new_relation)
|
|
395
|
+
# update all the relations that refer to it
|
|
396
|
+
for cached in self.relations.values():
|
|
397
|
+
if cached.is_referenced_by(old_key):
|
|
398
|
+
fire_event(
|
|
399
|
+
CacheAction(
|
|
400
|
+
action="update_reference",
|
|
401
|
+
ref_key=_make_ref_key_dict(old_key),
|
|
402
|
+
ref_key_2=_make_ref_key_dict(new_key),
|
|
403
|
+
ref_key_3=_make_ref_key_dict(cached.key()),
|
|
404
|
+
)
|
|
405
|
+
)
|
|
406
|
+
|
|
407
|
+
cached.rename_key(old_key, new_key)
|
|
408
|
+
|
|
409
|
+
self.relations[new_key] = relation
|
|
410
|
+
# also fixup the schemas!
|
|
411
|
+
self.add_schema(new_key.database, new_key.schema)
|
|
412
|
+
|
|
413
|
+
return True
|
|
414
|
+
|
|
415
|
+
def _check_rename_constraints(self, old_key, new_key):
|
|
416
|
+
"""Check the rename constraints, and return whether or not the rename
|
|
417
|
+
can proceed.
|
|
418
|
+
|
|
419
|
+
If the new key is already present, that is an error.
|
|
420
|
+
If the old key is absent, we debug log and return False, assuming it's
|
|
421
|
+
a temp table being renamed.
|
|
422
|
+
|
|
423
|
+
:param _ReferenceKey old_key: The existing key, to rename from.
|
|
424
|
+
:param _ReferenceKey new_key: The new key, to rename to.
|
|
425
|
+
:return bool: If the old relation exists for renaming.
|
|
426
|
+
:raises InternalError: If the new key is already present.
|
|
427
|
+
"""
|
|
428
|
+
if new_key in self.relations:
|
|
429
|
+
# Tell user when collision caused by model names truncated during
|
|
430
|
+
# materialization.
|
|
431
|
+
raise TruncatedModelNameCausedCollisionError(new_key, self.relations)
|
|
432
|
+
|
|
433
|
+
if old_key not in self.relations:
|
|
434
|
+
fire_event(CacheAction(action="temporary_relation", ref_key=old_key._asdict()))
|
|
435
|
+
return False
|
|
436
|
+
return True
|
|
437
|
+
|
|
438
|
+
def rename(self, old, new):
|
|
439
|
+
"""Rename the old schema/identifier to the new schema/identifier and
|
|
440
|
+
update references.
|
|
441
|
+
|
|
442
|
+
If the new schema/identifier is already present, that is an error.
|
|
443
|
+
If the schema/identifier key is absent, we only debug log and return,
|
|
444
|
+
assuming it's a temp table being renamed.
|
|
445
|
+
|
|
446
|
+
:param BaseRelation old: The existing relation name information.
|
|
447
|
+
:param BaseRelation new: The new relation name information.
|
|
448
|
+
:raises InternalError: If the new key is already present.
|
|
449
|
+
"""
|
|
450
|
+
old_key = _make_ref_key(old)
|
|
451
|
+
new_key = _make_ref_key(new)
|
|
452
|
+
fire_event(
|
|
453
|
+
CacheAction(
|
|
454
|
+
action="rename_relation",
|
|
455
|
+
ref_key=old_key._asdict(),
|
|
456
|
+
ref_key_2=new_key._asdict(),
|
|
457
|
+
)
|
|
458
|
+
)
|
|
459
|
+
fire_event_if(
|
|
460
|
+
self.log_cache_events,
|
|
461
|
+
lambda: CacheDumpGraph(before_after="before", action="rename", dump=self.dump_graph()),
|
|
462
|
+
)
|
|
463
|
+
|
|
464
|
+
with self.lock:
|
|
465
|
+
if self._check_rename_constraints(old_key, new_key):
|
|
466
|
+
self._rename_relation(old_key, _CachedRelation(new))
|
|
467
|
+
else:
|
|
468
|
+
self._setdefault(_CachedRelation(new))
|
|
469
|
+
|
|
470
|
+
fire_event_if(
|
|
471
|
+
self.log_cache_events,
|
|
472
|
+
lambda: CacheDumpGraph(before_after="after", action="rename", dump=self.dump_graph()),
|
|
473
|
+
)
|
|
474
|
+
|
|
475
|
+
def get_relations(self, database: Optional[str], schema: Optional[str]) -> List[Any]:
|
|
476
|
+
"""Case-insensitively yield all relations matching the given schema.
|
|
477
|
+
|
|
478
|
+
:param str schema: The case-insensitive schema name to list from.
|
|
479
|
+
:return List[BaseRelation]: The list of relations with the given
|
|
480
|
+
schema
|
|
481
|
+
"""
|
|
482
|
+
database = lowercase(database)
|
|
483
|
+
schema = lowercase(schema)
|
|
484
|
+
with self.lock:
|
|
485
|
+
results = [
|
|
486
|
+
r.inner
|
|
487
|
+
for r in self.relations.values()
|
|
488
|
+
if (lowercase(r.schema) == schema and lowercase(r.database) == database)
|
|
489
|
+
]
|
|
490
|
+
|
|
491
|
+
if None in results:
|
|
492
|
+
raise NoneRelationFoundError()
|
|
493
|
+
return results
|
|
494
|
+
|
|
495
|
+
def clear(self):
    """Empty the cache, dropping every tracked relation and schema."""
    with self.lock:
        # The two containers are independent; wipe both under the lock.
        self.schemas.clear()
        self.relations.clear()
|
|
500
|
+
|
|
501
|
+
def _list_relations_in_schema(
    self, database: Optional[str], schema: Optional[str]
) -> List[_CachedRelation]:
    """Get the relations in a schema. Callers should hold the lock.

    :param database: The case-insensitive database name (may be None).
    :param schema: The case-insensitive schema name (may be None).
    :return: The cached relations whose (database, schema) key matches.
    """
    key = (lowercase(database), lowercase(schema))
    # A comprehension replaces the original append loop; the result is a
    # listing of matches (the caller decides what to do with them).
    return [
        relation
        for cachekey, relation in self.relations.items()
        if (cachekey.database, cachekey.schema) == key
    ]
|
|
512
|
+
|
|
513
|
+
def _remove_all(self, to_remove: List[_CachedRelation]):
    """Remove all the listed relations. Ignore relations that have been
    cascaded out.
    """
    for relation in to_remove:
        drop_key = _make_ref_key(relation)
        # A prior drop may have cascaded this entry out already; skip it.
        if drop_key not in self.relations:
            continue
        self.drop(drop_key)
|
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
from dataclasses import dataclass
|
|
2
|
+
from enum import Enum
|
|
3
|
+
from typing import Optional, DefaultDict, Mapping
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class Capability(str, Enum):
    """Enumeration of optional adapter features which can be probed using BaseAdapter.capabilities()"""

    # NOTE: each member's value equals its name; the str mixin means members
    # compare equal to (and serialize as) these strings.

    SchemaMetadataByRelations = "SchemaMetadataByRelations"
    """Indicates efficient support for retrieving schema metadata for a list of relations, rather than always retrieving
    all the relations in a schema."""

    TableLastModifiedMetadata = "TableLastModifiedMetadata"
    """Indicates support for determining the time of the last table modification by querying database metadata."""

    TableLastModifiedMetadataBatch = "TableLastModifiedMetadataBatch"
    """Indicates support for performantly determining the time of the last table modification by querying database
    metadata in batch."""

    GetCatalogForSingleRelation = "GetCatalogForSingleRelation"
    """Indicates support for getting catalog information including table-level and column-level metadata for a single
    relation."""

    MicrobatchConcurrency = "MicrobatchConcurrency"
    """Indicates support running the microbatch incremental materialization strategy concurrently across threads."""
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class Support(str, Enum):
    """Level of support an adapter declares for a given Capability."""

    Unknown = "Unknown"
    """The adapter has not declared whether this capability is a feature of the underlying DBMS."""

    Unsupported = "Unsupported"
    """This capability is not possible with the underlying DBMS, so the adapter does not implement related macros."""

    # NOTE: this member shadows the builtin ``NotImplemented`` inside the class
    # namespace only; it is an ordinary enum member, not the builtin singleton.
    NotImplemented = "NotImplemented"
    """This capability is available in the underlying DBMS, but support has not yet been implemented in the adapter."""

    Versioned = "Versioned"
    """Some versions of the DBMS supported by the adapter support this capability and the adapter has implemented any
    macros needed to use it."""

    Full = "Full"
    """All versions of the DBMS supported by the adapter support this capability and the adapter has implemented any
    macros needed to use it."""
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
@dataclass
class CapabilitySupport:
    """Support level for a capability, optionally tagged with the first
    adapter version in which it became available."""

    support: Support
    first_version: Optional[str] = None

    def __bool__(self):
        """Truthy exactly when the capability is usable (Versioned or Full)."""
        return self.support in (Support.Versioned, Support.Full)
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
class CapabilityDict(DefaultDict[Capability, CapabilitySupport]):
    """Mapping from Capability to CapabilitySupport in which looking up an
    undeclared capability yields a Support.Unknown entry instead of raising."""

    def __init__(self, vals: Mapping[Capability, CapabilitySupport]):
        # Missing keys fall back to the Unknown-support default.
        super().__init__(self._default)
        for capability, capability_support in vals.items():
            self[capability] = capability_support

    @staticmethod
    def _default():
        # Factory used by the DefaultDict machinery for absent keys.
        return CapabilitySupport(support=Support.Unknown)
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
from dbt.adapters.catalogs._client import CatalogIntegrationClient
|
|
2
|
+
from dbt.adapters.catalogs._exceptions import (
|
|
3
|
+
DbtCatalogIntegrationAlreadyExistsError,
|
|
4
|
+
DbtCatalogIntegrationNotFoundError,
|
|
5
|
+
DbtCatalogIntegrationNotSupportedError,
|
|
6
|
+
InvalidCatalogIntegrationConfigError,
|
|
7
|
+
)
|
|
8
|
+
from dbt.adapters.catalogs._integration import (
|
|
9
|
+
CatalogIntegration,
|
|
10
|
+
CatalogIntegrationConfig,
|
|
11
|
+
CatalogRelation,
|
|
12
|
+
)
|
|
13
|
+
|
|
14
|
+
from dbt.adapters.catalogs._constants import CATALOG_INTEGRATION_MODEL_CONFIG_NAME
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
from typing import Dict, Iterable, Type
|
|
2
|
+
|
|
3
|
+
from dbt.adapters.catalogs._exceptions import (
|
|
4
|
+
DbtCatalogIntegrationAlreadyExistsError,
|
|
5
|
+
DbtCatalogIntegrationNotFoundError,
|
|
6
|
+
DbtCatalogIntegrationNotSupportedError,
|
|
7
|
+
)
|
|
8
|
+
from dbt.adapters.catalogs._integration import (
|
|
9
|
+
CatalogIntegration,
|
|
10
|
+
CatalogIntegrationConfig,
|
|
11
|
+
)
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class CatalogIntegrationClient:
    """
    A repository class that manages catalog integrations

    This class manages all types of catalog integrations,
    supporting operations like registering new integrations and retrieving existing ones.
    There is only one instance of this class per adapter.

    Attributes:
        __supported_catalogs (Dict[str, Type[CatalogIntegration]]): a dictionary of supported
            catalog types mapped to their corresponding factory classes
        __catalog_integrations (Dict[str, CatalogIntegration]): a dictionary of catalog
            integration names mapped to their instances
    """

    def __init__(self, supported_catalogs: Iterable[Type[CatalogIntegration]]):
        # Index factories by case-folded catalog type so lookups are
        # case-insensitive.
        self.__supported_catalogs: Dict[str, Type[CatalogIntegration]] = {}
        for factory in supported_catalogs:
            self.__supported_catalogs[factory.catalog_type.casefold()] = factory
        self.__catalog_integrations: Dict[str, CatalogIntegration] = {}

    def add(self, config: CatalogIntegrationConfig) -> CatalogIntegration:
        """Register a new integration instance built from `config`; raises if
        the type is unsupported or the name is already registered."""
        integration_factory = self.__catalog_integration_factory(config.catalog_type)
        if config.name in self.__catalog_integrations:
            raise DbtCatalogIntegrationAlreadyExistsError(config.name)
        self.__catalog_integrations[config.name] = integration_factory(config)
        return self.get(config.name)

    def get(self, name: str) -> CatalogIntegration:
        """Return the integration registered under `name`."""
        if name not in self.__catalog_integrations:
            raise DbtCatalogIntegrationNotFoundError(name, self.__catalog_integrations.keys())
        return self.__catalog_integrations[name]

    def __catalog_integration_factory(self, catalog_type: str) -> Type[CatalogIntegration]:
        """Resolve the factory class for a catalog type (case-insensitive)."""
        try:
            return self.__supported_catalogs[catalog_type.casefold()]
        except KeyError as e:
            raise DbtCatalogIntegrationNotSupportedError(
                catalog_type, self.__supported_catalogs.keys()
            ) from e
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# Model-config key naming the catalog integration a model should use
# (presumably read from a model's config dict by callers — confirm at call sites).
CATALOG_INTEGRATION_MODEL_CONFIG_NAME = "catalog_name"
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
from typing import Iterable
|
|
2
|
+
|
|
3
|
+
from dbt_common.exceptions import DbtConfigError
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class DbtCatalogIntegrationAlreadyExistsError(DbtConfigError):
    """Raised when registering a catalog integration under a name that is already in use."""

    def __init__(self, catalog_name: str) -> None:
        self.catalog_name = catalog_name
        super().__init__(f"Catalog already exists: {catalog_name}.")
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class DbtCatalogIntegrationNotFoundError(DbtConfigError):
    """Raised when a requested catalog integration name has not been registered."""

    def __init__(self, catalog_name: str, existing_catalog_names: Iterable[str]) -> None:
        self.catalog_name = catalog_name
        # Join the message parts with newlines; the original adjacent f-strings
        # concatenated with no separators ("...not found.Received: xExpected...")
        # and left a stray trailing "?". Format now matches
        # DbtCatalogIntegrationNotSupportedError.
        msg = (
            f"Catalog not found.\n"
            f"Received: {self.catalog_name}\n"
            f"Expected one of: {', '.join(existing_catalog_names)}"
        )
        super().__init__(msg)
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class DbtCatalogIntegrationNotSupportedError(DbtConfigError):
    """Raised when a catalog type has no registered factory for this adapter."""

    def __init__(self, catalog_type: str, supported_catalog_types: Iterable[str]) -> None:
        self.catalog_type = catalog_type
        supported = ", ".join(supported_catalog_types)
        super().__init__(
            f"Catalog type is not supported.\n"
            f"Received: {catalog_type}\n"
            f"Expected one of: {supported}"
        )
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
class InvalidCatalogIntegrationConfigError(DbtConfigError):
    """Raised when a catalog integration's configuration fails validation."""

    def __init__(self, catalog_name: str, msg: str) -> None:
        self.catalog_name = catalog_name
        super().__init__(f"Invalid catalog integration config: {catalog_name}. {msg}")
|