dbt-adapters 1.14.0__py3-none-any.whl → 1.14.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of dbt-adapters has been flagged as potentially problematic. See the release advisory for more details.
- dbt/adapters/__about__.py +1 -1
- dbt/adapters/base/impl.py +79 -5
- dbt/adapters/base/meta.py +1 -1
- dbt/adapters/base/relation.py +10 -1
- dbt/adapters/catalogs/__init__.py +10 -0
- dbt/adapters/catalogs/_client.py +54 -0
- dbt/adapters/catalogs/_exceptions.py +32 -0
- dbt/adapters/catalogs/_integration.py +75 -0
- dbt/adapters/contracts/connection.py +1 -1
- dbt/adapters/contracts/relation.py +1 -0
- dbt/adapters/events/adapter_types.proto +1 -0
- dbt/adapters/events/adapter_types_pb2.py +174 -174
- dbt/adapters/protocol.py +1 -1
- dbt/adapters/record/base.py +163 -0
- dbt/adapters/record/handle.py +16 -0
- dbt/adapters/record/serialization.py +31 -0
- dbt/adapters/relation_configs/config_base.py +1 -1
- dbt/adapters/sql/connections.py +1 -1
- dbt/adapters/sql/impl.py +8 -2
- dbt/include/global_project/macros/materializations/models/incremental/merge.sql +16 -29
- dbt/include/global_project/macros/python_model/python.sql +1 -1
- dbt_adapters-1.14.3.dist-info/METADATA +123 -0
- {dbt_adapters-1.14.0.dist-info → dbt_adapters-1.14.3.dist-info}/RECORD +25 -19
- dbt_adapters-1.14.0.dist-info/METADATA +0 -76
- {dbt_adapters-1.14.0.dist-info → dbt_adapters-1.14.3.dist-info}/WHEEL +0 -0
- {dbt_adapters-1.14.0.dist-info → dbt_adapters-1.14.3.dist-info}/licenses/LICENSE +0 -0
dbt/adapters/__about__.py
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
version = "1.14.
|
|
1
|
+
version = "1.14.3"
|
dbt/adapters/base/impl.py
CHANGED
|
@@ -24,6 +24,13 @@ from typing import (
|
|
|
24
24
|
TYPE_CHECKING,
|
|
25
25
|
)
|
|
26
26
|
import pytz
|
|
27
|
+
|
|
28
|
+
from dbt.adapters.record.base import (
|
|
29
|
+
AdapterExecuteRecord,
|
|
30
|
+
AdapterGetPartitionsMetadataRecord,
|
|
31
|
+
AdapterConvertTypeRecord,
|
|
32
|
+
AdapterStandardizeGrantsDictRecord,
|
|
33
|
+
)
|
|
27
34
|
from dbt_common.behavior_flags import Behavior, BehaviorFlag
|
|
28
35
|
from dbt_common.clients.jinja import CallableMacroGenerator
|
|
29
36
|
from dbt_common.contracts.constraints import (
|
|
@@ -42,6 +49,7 @@ from dbt_common.exceptions import (
|
|
|
42
49
|
NotImplementedError,
|
|
43
50
|
UnexpectedNullError,
|
|
44
51
|
)
|
|
52
|
+
from dbt_common.record import auto_record_function, record_function, supports_replay
|
|
45
53
|
from dbt_common.utils import (
|
|
46
54
|
AttrDict,
|
|
47
55
|
cast_to_str,
|
|
@@ -65,6 +73,11 @@ from dbt.adapters.base.relation import (
|
|
|
65
73
|
)
|
|
66
74
|
from dbt.adapters.cache import RelationsCache, _make_ref_key_dict
|
|
67
75
|
from dbt.adapters.capability import Capability, CapabilityDict
|
|
76
|
+
from dbt.adapters.catalogs import (
|
|
77
|
+
CatalogIntegration,
|
|
78
|
+
CatalogIntegrationClient,
|
|
79
|
+
CatalogIntegrationConfig,
|
|
80
|
+
)
|
|
68
81
|
from dbt.adapters.contracts.connection import Credentials
|
|
69
82
|
from dbt.adapters.contracts.macros import MacroResolverProtocol
|
|
70
83
|
from dbt.adapters.contracts.relation import RelationConfig
|
|
@@ -220,6 +233,7 @@ class SnapshotStrategy(TypedDict):
|
|
|
220
233
|
hard_deletes: Optional[str]
|
|
221
234
|
|
|
222
235
|
|
|
236
|
+
@supports_replay
|
|
223
237
|
class BaseAdapter(metaclass=AdapterMeta):
|
|
224
238
|
"""The BaseAdapter provides an abstract base class for adapters.
|
|
225
239
|
|
|
@@ -269,6 +283,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
|
|
269
283
|
Relation: Type[BaseRelation] = BaseRelation
|
|
270
284
|
Column: Type[BaseColumn] = BaseColumn
|
|
271
285
|
ConnectionManager: Type[BaseConnectionManager]
|
|
286
|
+
CATALOG_INTEGRATIONS: Dict[str, Type[CatalogIntegration]] = {}
|
|
272
287
|
|
|
273
288
|
# A set of clobber config fields accepted by this adapter
|
|
274
289
|
# for use in materializations
|
|
@@ -295,6 +310,14 @@ class BaseAdapter(metaclass=AdapterMeta):
|
|
|
295
310
|
self._macro_resolver: Optional[MacroResolverProtocol] = None
|
|
296
311
|
self._macro_context_generator: Optional[MacroContextGeneratorCallable] = None
|
|
297
312
|
self.behavior = DEFAULT_BASE_BEHAVIOR_FLAGS # type: ignore
|
|
313
|
+
self._catalog_client = CatalogIntegrationClient(self.CATALOG_INTEGRATIONS)
|
|
314
|
+
|
|
315
|
+
def add_catalog_integration(self, catalog: CatalogIntegrationConfig) -> CatalogIntegration:
|
|
316
|
+
return self._catalog_client.add(catalog)
|
|
317
|
+
|
|
318
|
+
@available
|
|
319
|
+
def get_catalog_integration(self, name: str) -> CatalogIntegration:
|
|
320
|
+
return self._catalog_client.get(name)
|
|
298
321
|
|
|
299
322
|
###
|
|
300
323
|
# Methods to set / access a macro resolver
|
|
@@ -383,6 +406,9 @@ class BaseAdapter(metaclass=AdapterMeta):
|
|
|
383
406
|
self.connections.query_header.reset()
|
|
384
407
|
|
|
385
408
|
@available.parse(_parse_callback_empty_table)
|
|
409
|
+
@record_function(
|
|
410
|
+
AdapterExecuteRecord, method=True, index_on_thread_id=True, id_field_name="thread_id"
|
|
411
|
+
)
|
|
386
412
|
def execute(
|
|
387
413
|
self,
|
|
388
414
|
sql: str,
|
|
@@ -414,6 +440,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
|
|
414
440
|
"""
|
|
415
441
|
raise NotImplementedError("`validate_sql` is not implemented for this adapter!")
|
|
416
442
|
|
|
443
|
+
@auto_record_function("AdapterGetColumnSchemaFromQuery", group="Available")
|
|
417
444
|
@available.parse(lambda *a, **k: [])
|
|
418
445
|
def get_column_schema_from_query(self, sql: str) -> List[BaseColumn]:
|
|
419
446
|
"""Get a list of the Columns with names and data types from the given sql."""
|
|
@@ -427,6 +454,12 @@ class BaseAdapter(metaclass=AdapterMeta):
|
|
|
427
454
|
]
|
|
428
455
|
return columns
|
|
429
456
|
|
|
457
|
+
@record_function(
|
|
458
|
+
AdapterGetPartitionsMetadataRecord,
|
|
459
|
+
method=True,
|
|
460
|
+
index_on_thread_id=True,
|
|
461
|
+
id_field_name="thread_id",
|
|
462
|
+
)
|
|
430
463
|
@available.parse(_parse_callback_empty_table)
|
|
431
464
|
def get_partitions_metadata(self, table: str) -> Tuple["agate.Table"]:
|
|
432
465
|
"""
|
|
@@ -576,6 +609,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
|
|
576
609
|
self.cache.clear()
|
|
577
610
|
self._relations_cache_for_schemas(relation_configs, required_schemas)
|
|
578
611
|
|
|
612
|
+
@auto_record_function("AdapterCacheAdded", group="Available")
|
|
579
613
|
@available
|
|
580
614
|
def cache_added(self, relation: Optional[BaseRelation]) -> str:
|
|
581
615
|
"""Cache a new relation in dbt. It will show up in `list relations`."""
|
|
@@ -586,6 +620,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
|
|
586
620
|
# so jinja doesn't render things
|
|
587
621
|
return ""
|
|
588
622
|
|
|
623
|
+
@auto_record_function("AdapterCacheDropped", group="Available")
|
|
589
624
|
@available
|
|
590
625
|
def cache_dropped(self, relation: Optional[BaseRelation]) -> str:
|
|
591
626
|
"""Drop a relation in dbt. It will no longer show up in
|
|
@@ -597,6 +632,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
|
|
597
632
|
self.cache.drop(relation)
|
|
598
633
|
return ""
|
|
599
634
|
|
|
635
|
+
@auto_record_function("AdapterCacheRenamed", group="Available")
|
|
600
636
|
@available
|
|
601
637
|
def cache_renamed(
|
|
602
638
|
self,
|
|
@@ -637,6 +673,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
|
|
637
673
|
"""Get a list of existing schemas in database"""
|
|
638
674
|
raise NotImplementedError("`list_schemas` is not implemented for this adapter!")
|
|
639
675
|
|
|
676
|
+
@auto_record_function("AdapterCheckSchemaExists", group="Available")
|
|
640
677
|
@available.parse(lambda *a, **k: False)
|
|
641
678
|
def check_schema_exists(self, database: str, schema: str) -> bool:
|
|
642
679
|
"""Check if a schema exists.
|
|
@@ -651,6 +688,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
|
|
651
688
|
###
|
|
652
689
|
# Abstract methods about relations
|
|
653
690
|
###
|
|
691
|
+
@auto_record_function("AdapterDropRelation", group="Available")
|
|
654
692
|
@abc.abstractmethod
|
|
655
693
|
@available.parse_none
|
|
656
694
|
def drop_relation(self, relation: BaseRelation) -> None:
|
|
@@ -660,12 +698,14 @@ class BaseAdapter(metaclass=AdapterMeta):
|
|
|
660
698
|
"""
|
|
661
699
|
raise NotImplementedError("`drop_relation` is not implemented for this adapter!")
|
|
662
700
|
|
|
701
|
+
@auto_record_function("AdapterTruncateRelation", group="Available")
|
|
663
702
|
@abc.abstractmethod
|
|
664
703
|
@available.parse_none
|
|
665
704
|
def truncate_relation(self, relation: BaseRelation) -> None:
|
|
666
705
|
"""Truncate the given relation."""
|
|
667
706
|
raise NotImplementedError("`truncate_relation` is not implemented for this adapter!")
|
|
668
707
|
|
|
708
|
+
@auto_record_function("AdapterRenameRelation", group="Available")
|
|
669
709
|
@abc.abstractmethod
|
|
670
710
|
@available.parse_none
|
|
671
711
|
def rename_relation(self, from_relation: BaseRelation, to_relation: BaseRelation) -> None:
|
|
@@ -675,6 +715,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
|
|
675
715
|
"""
|
|
676
716
|
raise NotImplementedError("`rename_relation` is not implemented for this adapter!")
|
|
677
717
|
|
|
718
|
+
@auto_record_function("AdapterGetColumnsInRelation", group="Available")
|
|
678
719
|
@abc.abstractmethod
|
|
679
720
|
@available.parse_list
|
|
680
721
|
def get_columns_in_relation(self, relation: BaseRelation) -> List[BaseColumn]:
|
|
@@ -687,6 +728,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
|
|
687
728
|
"`get_catalog_for_single_relation` is not implemented for this adapter!"
|
|
688
729
|
)
|
|
689
730
|
|
|
731
|
+
@auto_record_function("AdapterGetColumnsInTable", group="Available")
|
|
690
732
|
@available.deprecated("get_columns_in_relation", lambda *a, **k: [])
|
|
691
733
|
def get_columns_in_table(self, schema: str, identifier: str) -> List[BaseColumn]:
|
|
692
734
|
"""DEPRECATED: Get a list of the columns in the given table."""
|
|
@@ -729,6 +771,12 @@ class BaseAdapter(metaclass=AdapterMeta):
|
|
|
729
771
|
###
|
|
730
772
|
# Methods about grants
|
|
731
773
|
###
|
|
774
|
+
@record_function(
|
|
775
|
+
AdapterStandardizeGrantsDictRecord,
|
|
776
|
+
method=True,
|
|
777
|
+
index_on_thread_id=True,
|
|
778
|
+
id_field_name="thread_id",
|
|
779
|
+
)
|
|
732
780
|
@available
|
|
733
781
|
def standardize_grants_dict(self, grants_table: "agate.Table") -> dict:
|
|
734
782
|
"""Translate the result of `show grants` (or equivalent) to match the
|
|
@@ -743,6 +791,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
|
|
743
791
|
:return: A standardized dictionary matching the `grants` config
|
|
744
792
|
:rtype: dict
|
|
745
793
|
"""
|
|
794
|
+
|
|
746
795
|
grants_dict: Dict[str, List[str]] = {}
|
|
747
796
|
for row in grants_table:
|
|
748
797
|
grantee = row["grantee"]
|
|
@@ -756,6 +805,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
|
|
756
805
|
###
|
|
757
806
|
# Provided methods about relations
|
|
758
807
|
###
|
|
808
|
+
@auto_record_function("AdapterGetMissingColumns", group="Available")
|
|
759
809
|
@available.parse_list
|
|
760
810
|
def get_missing_columns(
|
|
761
811
|
self, from_relation: BaseRelation, to_relation: BaseRelation
|
|
@@ -787,6 +837,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
|
|
787
837
|
|
|
788
838
|
return [col for (col_name, col) in from_columns.items() if col_name in missing_columns]
|
|
789
839
|
|
|
840
|
+
@auto_record_function("AdapterValidSnapshotTarget", group="Available")
|
|
790
841
|
@available.parse_none
|
|
791
842
|
def valid_snapshot_target(
|
|
792
843
|
self, relation: BaseRelation, column_names: Optional[Dict[str, str]] = None
|
|
@@ -819,10 +870,12 @@ class BaseAdapter(metaclass=AdapterMeta):
|
|
|
819
870
|
if missing:
|
|
820
871
|
raise SnapshotTargetNotSnapshotTableError(missing)
|
|
821
872
|
|
|
873
|
+
@auto_record_function("AdapterAssertValidSnapshotTargetGivenStrategy", group="Available")
|
|
822
874
|
@available.parse_none
|
|
823
875
|
def assert_valid_snapshot_target_given_strategy(
|
|
824
876
|
self, relation: BaseRelation, column_names: Dict[str, str], strategy: SnapshotStrategy
|
|
825
877
|
) -> None:
|
|
878
|
+
|
|
826
879
|
# Assert everything we can with the legacy function.
|
|
827
880
|
self.valid_snapshot_target(relation, column_names)
|
|
828
881
|
|
|
@@ -841,10 +894,12 @@ class BaseAdapter(metaclass=AdapterMeta):
|
|
|
841
894
|
if missing:
|
|
842
895
|
raise SnapshotTargetNotSnapshotTableError(missing)
|
|
843
896
|
|
|
897
|
+
@auto_record_function("AdapterExpandTargetColumnTypes", group="Available")
|
|
844
898
|
@available.parse_none
|
|
845
899
|
def expand_target_column_types(
|
|
846
900
|
self, from_relation: BaseRelation, to_relation: BaseRelation
|
|
847
901
|
) -> None:
|
|
902
|
+
|
|
848
903
|
if not isinstance(from_relation, self.Relation):
|
|
849
904
|
raise MacroArgTypeError(
|
|
850
905
|
method_name="expand_target_column_types",
|
|
@@ -935,8 +990,10 @@ class BaseAdapter(metaclass=AdapterMeta):
|
|
|
935
990
|
|
|
936
991
|
return matches
|
|
937
992
|
|
|
993
|
+
@auto_record_function("AdapterGetRelation", group="Available")
|
|
938
994
|
@available.parse_none
|
|
939
995
|
def get_relation(self, database: str, schema: str, identifier: str) -> Optional[BaseRelation]:
|
|
996
|
+
|
|
940
997
|
relations_list = self.list_relations(database, schema)
|
|
941
998
|
|
|
942
999
|
matches = self._make_match(relations_list, database, schema, identifier)
|
|
@@ -954,9 +1011,11 @@ class BaseAdapter(metaclass=AdapterMeta):
|
|
|
954
1011
|
|
|
955
1012
|
return None
|
|
956
1013
|
|
|
1014
|
+
@auto_record_function("AdapterAlreadyExists", group="Available")
|
|
957
1015
|
@available.deprecated("get_relation", lambda *a, **k: False)
|
|
958
1016
|
def already_exists(self, schema: str, name: str) -> bool:
|
|
959
1017
|
"""DEPRECATED: Return if a model already exists in the database"""
|
|
1018
|
+
|
|
960
1019
|
database = self.config.credentials.database
|
|
961
1020
|
relation = self.get_relation(database, schema, name)
|
|
962
1021
|
return relation is not None
|
|
@@ -965,12 +1024,14 @@ class BaseAdapter(metaclass=AdapterMeta):
|
|
|
965
1024
|
# ODBC FUNCTIONS -- these should not need to change for every adapter,
|
|
966
1025
|
# although some adapters may override them
|
|
967
1026
|
###
|
|
1027
|
+
@auto_record_function("AdapterCreateSchema", group="Available")
|
|
968
1028
|
@abc.abstractmethod
|
|
969
1029
|
@available.parse_none
|
|
970
1030
|
def create_schema(self, relation: BaseRelation):
|
|
971
1031
|
"""Create the given schema if it does not exist."""
|
|
972
1032
|
raise NotImplementedError("`create_schema` is not implemented for this adapter!")
|
|
973
1033
|
|
|
1034
|
+
@auto_record_function("AdapterDropSchema", group="Available")
|
|
974
1035
|
@abc.abstractmethod
|
|
975
1036
|
@available.parse_none
|
|
976
1037
|
def drop_schema(self, relation: BaseRelation):
|
|
@@ -979,11 +1040,13 @@ class BaseAdapter(metaclass=AdapterMeta):
|
|
|
979
1040
|
|
|
980
1041
|
@available
|
|
981
1042
|
@classmethod
|
|
1043
|
+
@auto_record_function("AdapterQuote", group="Available")
|
|
982
1044
|
@abc.abstractmethod
|
|
983
1045
|
def quote(cls, identifier: str) -> str:
|
|
984
1046
|
"""Quote the given identifier, as appropriate for the database."""
|
|
985
1047
|
raise NotImplementedError("`quote` is not implemented for this adapter!")
|
|
986
1048
|
|
|
1049
|
+
@auto_record_function("AdapterQuoteAsConfigured", group="Available")
|
|
987
1050
|
@available
|
|
988
1051
|
def quote_as_configured(self, identifier: str, quote_key: str) -> str:
|
|
989
1052
|
"""Quote or do not quote the given identifer as configured in the
|
|
@@ -992,6 +1055,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
|
|
992
1055
|
The quote key should be one of 'database' (on bigquery, 'profile'),
|
|
993
1056
|
'identifier', or 'schema', or it will be treated as if you set `True`.
|
|
994
1057
|
"""
|
|
1058
|
+
|
|
995
1059
|
try:
|
|
996
1060
|
key = ComponentName(quote_key)
|
|
997
1061
|
except ValueError:
|
|
@@ -1003,8 +1067,10 @@ class BaseAdapter(metaclass=AdapterMeta):
|
|
|
1003
1067
|
else:
|
|
1004
1068
|
return identifier
|
|
1005
1069
|
|
|
1070
|
+
@auto_record_function("AdapterQuoteSeedColumn", group="Available")
|
|
1006
1071
|
@available
|
|
1007
1072
|
def quote_seed_column(self, column: str, quote_config: Optional[bool]) -> str:
|
|
1073
|
+
|
|
1008
1074
|
quote_columns: bool = True
|
|
1009
1075
|
if isinstance(quote_config, bool):
|
|
1010
1076
|
quote_columns = quote_config
|
|
@@ -1107,7 +1173,11 @@ class BaseAdapter(metaclass=AdapterMeta):
|
|
|
1107
1173
|
|
|
1108
1174
|
@available
|
|
1109
1175
|
@classmethod
|
|
1176
|
+
@record_function(
|
|
1177
|
+
AdapterConvertTypeRecord, method=True, index_on_thread_id=True, id_field_name="thread_id"
|
|
1178
|
+
)
|
|
1110
1179
|
def convert_type(cls, agate_table: "agate.Table", col_idx: int) -> Optional[str]:
|
|
1180
|
+
|
|
1111
1181
|
return cls.convert_agate_type(agate_table, col_idx)
|
|
1112
1182
|
|
|
1113
1183
|
@classmethod
|
|
@@ -1224,7 +1294,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
|
|
1224
1294
|
kwargs = {"information_schema": information_schema, "schemas": schemas}
|
|
1225
1295
|
table = self.execute_macro(GET_CATALOG_MACRO_NAME, kwargs=kwargs)
|
|
1226
1296
|
|
|
1227
|
-
results = self._catalog_filter_table(table, used_schemas)
|
|
1297
|
+
results = self._catalog_filter_table(table, used_schemas)
|
|
1228
1298
|
return results
|
|
1229
1299
|
|
|
1230
1300
|
def _get_one_catalog_by_relations(
|
|
@@ -1239,7 +1309,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
|
|
1239
1309
|
}
|
|
1240
1310
|
table = self.execute_macro(GET_CATALOG_RELATIONS_MACRO_NAME, kwargs=kwargs)
|
|
1241
1311
|
|
|
1242
|
-
results = self._catalog_filter_table(table, used_schemas)
|
|
1312
|
+
results = self._catalog_filter_table(table, used_schemas)
|
|
1243
1313
|
return results
|
|
1244
1314
|
|
|
1245
1315
|
def get_filtered_catalog(
|
|
@@ -1435,7 +1505,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
|
|
1435
1505
|
macro_resolver=macro_resolver,
|
|
1436
1506
|
needs_conn=True,
|
|
1437
1507
|
)
|
|
1438
|
-
adapter_response, table = result.response, result.table
|
|
1508
|
+
adapter_response, table = result.response, result.table
|
|
1439
1509
|
adapter_responses.append(adapter_response)
|
|
1440
1510
|
|
|
1441
1511
|
for row in table:
|
|
@@ -1709,7 +1779,9 @@ class BaseAdapter(metaclass=AdapterMeta):
|
|
|
1709
1779
|
|
|
1710
1780
|
@available
|
|
1711
1781
|
@classmethod
|
|
1712
|
-
|
|
1782
|
+
@auto_record_function("AdapterRenderRawColumnConstraints", group="Available")
|
|
1783
|
+
def render_raw_columns_constraints(cls, raw_columns: Dict[str, Dict[str, Any]]) -> List[str]:
|
|
1784
|
+
|
|
1713
1785
|
rendered_column_constraints = []
|
|
1714
1786
|
|
|
1715
1787
|
for v in raw_columns.values():
|
|
@@ -1763,7 +1835,9 @@ class BaseAdapter(metaclass=AdapterMeta):
|
|
|
1763
1835
|
|
|
1764
1836
|
@available
|
|
1765
1837
|
@classmethod
|
|
1838
|
+
@auto_record_function("AdapterRenderRawModelConstraints", group="Available")
|
|
1766
1839
|
def render_raw_model_constraints(cls, raw_constraints: List[Dict[str, Any]]) -> List[str]:
|
|
1840
|
+
|
|
1767
1841
|
return [c for c in map(cls.render_raw_model_constraint, raw_constraints) if c is not None]
|
|
1768
1842
|
|
|
1769
1843
|
@classmethod
|
|
@@ -1835,7 +1909,7 @@ class BaseAdapter(metaclass=AdapterMeta):
|
|
|
1835
1909
|
|
|
1836
1910
|
@available.parse_none
|
|
1837
1911
|
@classmethod
|
|
1838
|
-
def get_hard_deletes_behavior(cls, config):
|
|
1912
|
+
def get_hard_deletes_behavior(cls, config: Dict[str, str]) -> str:
|
|
1839
1913
|
"""Check the hard_deletes config enum, and the legacy invalidate_hard_deletes
|
|
1840
1914
|
config flag in order to determine which behavior should be used for deleted
|
|
1841
1915
|
records in a snapshot. The default is to ignore them."""
|
dbt/adapters/base/meta.py
CHANGED
|
@@ -121,7 +121,7 @@ class AdapterMeta(abc.ABCMeta):
|
|
|
121
121
|
# I'm not sure there is any benefit to it after poking around a bit,
|
|
122
122
|
# but having it doesn't hurt on the python side (and omitting it could
|
|
123
123
|
# hurt for obscure metaclass reasons, for all I know)
|
|
124
|
-
cls = abc.ABCMeta.__new__(mcls, name, bases, namespace, **kwargs)
|
|
124
|
+
cls = abc.ABCMeta.__new__(mcls, name, bases, namespace, **kwargs)
|
|
125
125
|
|
|
126
126
|
# this is very much inspired by ABCMeta's own implementation
|
|
127
127
|
|
dbt/adapters/base/relation.py
CHANGED
|
@@ -60,6 +60,7 @@ class BaseRelation(FakeAPIObject, Hashable):
|
|
|
60
60
|
require_alias: bool = (
|
|
61
61
|
True # used to govern whether to add an alias when render_limited is called
|
|
62
62
|
)
|
|
63
|
+
catalog_name: Optional[str] = None
|
|
63
64
|
|
|
64
65
|
# register relation types that can be renamed for the purpose of replacing relations using stages and backups
|
|
65
66
|
# adding a relation type here also requires defining the associated rename macro
|
|
@@ -135,7 +136,7 @@ class BaseRelation(FakeAPIObject, Hashable):
|
|
|
135
136
|
if str(self.path.get_lowered_part(k)).strip(self.quote_character) != v.lower().strip(
|
|
136
137
|
self.quote_character
|
|
137
138
|
):
|
|
138
|
-
approximate_match = False
|
|
139
|
+
approximate_match = False
|
|
139
140
|
|
|
140
141
|
if approximate_match and not exact_match:
|
|
141
142
|
target = self.create(database=database, schema=schema, identifier=identifier)
|
|
@@ -305,6 +306,13 @@ class BaseRelation(FakeAPIObject, Hashable):
|
|
|
305
306
|
|
|
306
307
|
config_quoting = relation_config.quoting_dict
|
|
307
308
|
config_quoting.pop("column", None)
|
|
309
|
+
|
|
310
|
+
catalog_name = (
|
|
311
|
+
relation_config.catalog_name
|
|
312
|
+
if hasattr(relation_config, "catalog_name")
|
|
313
|
+
else relation_config.config.get("catalog", None) # type: ignore
|
|
314
|
+
)
|
|
315
|
+
|
|
308
316
|
# precedence: kwargs quoting > relation config quoting > base quoting > default quoting
|
|
309
317
|
quote_policy = deep_merge(
|
|
310
318
|
cls.get_default_quote_policy().to_dict(omit_none=True),
|
|
@@ -318,6 +326,7 @@ class BaseRelation(FakeAPIObject, Hashable):
|
|
|
318
326
|
schema=relation_config.schema,
|
|
319
327
|
identifier=relation_config.identifier,
|
|
320
328
|
quote_policy=quote_policy,
|
|
329
|
+
catalog_name=catalog_name,
|
|
321
330
|
**kwargs,
|
|
322
331
|
)
|
|
323
332
|
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
from dbt.adapters.catalogs._client import CatalogIntegrationClient
|
|
2
|
+
from dbt.adapters.catalogs._exceptions import (
|
|
3
|
+
DbtCatalogIntegrationAlreadyExistsError,
|
|
4
|
+
DbtCatalogIntegrationNotFoundError,
|
|
5
|
+
DbtCatalogIntegrationNotSupportedError,
|
|
6
|
+
)
|
|
7
|
+
from dbt.adapters.catalogs._integration import (
|
|
8
|
+
CatalogIntegration,
|
|
9
|
+
CatalogIntegrationConfig,
|
|
10
|
+
)
|
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
from typing import Dict, Type
|
|
2
|
+
|
|
3
|
+
from dbt.adapters.catalogs._exceptions import (
|
|
4
|
+
DbtCatalogIntegrationAlreadyExistsError,
|
|
5
|
+
DbtCatalogIntegrationNotFoundError,
|
|
6
|
+
DbtCatalogIntegrationNotSupportedError,
|
|
7
|
+
)
|
|
8
|
+
from dbt.adapters.catalogs._integration import (
|
|
9
|
+
CatalogIntegration,
|
|
10
|
+
CatalogIntegrationConfig,
|
|
11
|
+
)
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class CatalogIntegrationClient:
|
|
15
|
+
"""
|
|
16
|
+
A repository class that manages catalog integrations
|
|
17
|
+
|
|
18
|
+
This class manages all types of catalog integrations,
|
|
19
|
+
supporting operations like registering new integrations and retrieving existing ones.
|
|
20
|
+
There is only one instance of this class per adapter.
|
|
21
|
+
|
|
22
|
+
Attributes:
|
|
23
|
+
__supported_catalogs (Dict[str, Type[CatalogIntegration]]): a dictionary of supported
|
|
24
|
+
catalog types mapped to their corresponding factory classes
|
|
25
|
+
__catalog_integrations (Dict[str, CatalogIntegration]): a dictionary of catalog
|
|
26
|
+
integration names mapped to their instances
|
|
27
|
+
"""
|
|
28
|
+
|
|
29
|
+
def __init__(self, supported_catalogs: Dict[str, Type[CatalogIntegration]]):
|
|
30
|
+
self.__supported_catalogs = supported_catalogs
|
|
31
|
+
self.__catalog_integrations: Dict[str, CatalogIntegration] = {}
|
|
32
|
+
|
|
33
|
+
def add(self, catalog_integration: CatalogIntegrationConfig) -> CatalogIntegration:
|
|
34
|
+
try:
|
|
35
|
+
catalog_factory = self.__supported_catalogs[catalog_integration.catalog_type]
|
|
36
|
+
except KeyError:
|
|
37
|
+
raise DbtCatalogIntegrationNotSupportedError(
|
|
38
|
+
catalog_integration.catalog_type, self.__supported_catalogs.keys()
|
|
39
|
+
)
|
|
40
|
+
|
|
41
|
+
if catalog_integration.name in self.__catalog_integrations.keys():
|
|
42
|
+
raise DbtCatalogIntegrationAlreadyExistsError(catalog_integration.name)
|
|
43
|
+
|
|
44
|
+
self.__catalog_integrations[catalog_integration.name] = catalog_factory(
|
|
45
|
+
catalog_integration
|
|
46
|
+
)
|
|
47
|
+
|
|
48
|
+
return self.get(catalog_integration.name)
|
|
49
|
+
|
|
50
|
+
def get(self, name: str) -> CatalogIntegration:
|
|
51
|
+
try:
|
|
52
|
+
return self.__catalog_integrations[name]
|
|
53
|
+
except KeyError:
|
|
54
|
+
raise DbtCatalogIntegrationNotFoundError(name, self.__catalog_integrations.keys())
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
from typing import Iterable
|
|
2
|
+
|
|
3
|
+
from dbt_common.exceptions import DbtConfigError
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class DbtCatalogIntegrationAlreadyExistsError(DbtConfigError):
|
|
7
|
+
def __init__(self, catalog_name: str) -> None:
|
|
8
|
+
self.catalog_name = catalog_name
|
|
9
|
+
msg = f"Catalog already exists: {self.catalog_name}."
|
|
10
|
+
super().__init__(msg)
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class DbtCatalogIntegrationNotFoundError(DbtConfigError):
|
|
14
|
+
def __init__(self, catalog_name: str, existing_catalog_names: Iterable[str]) -> None:
|
|
15
|
+
self.catalog_name = catalog_name
|
|
16
|
+
msg = (
|
|
17
|
+
f"Catalog not found."
|
|
18
|
+
f"Received: {self.catalog_name}"
|
|
19
|
+
f"Expected one of: {', '.join(existing_catalog_names)}?"
|
|
20
|
+
)
|
|
21
|
+
super().__init__(msg)
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class DbtCatalogIntegrationNotSupportedError(DbtConfigError):
|
|
25
|
+
def __init__(self, catalog_type: str, supported_catalog_types: Iterable[str]) -> None:
|
|
26
|
+
self.catalog_type = catalog_type
|
|
27
|
+
msg = (
|
|
28
|
+
f"Catalog type is not supported."
|
|
29
|
+
f"Received: {catalog_type}"
|
|
30
|
+
f"Expected one of: {', '.join(supported_catalog_types)}"
|
|
31
|
+
)
|
|
32
|
+
super().__init__(msg)
|
|
@@ -0,0 +1,75 @@
|
|
|
1
|
+
from enum import Enum
|
|
2
|
+
from typing import Any, Dict, Optional
|
|
3
|
+
from typing_extensions import Protocol
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class CatalogIntegrationConfig(Protocol):
|
|
7
|
+
"""
|
|
8
|
+
Represents the user configuration required to describe a catalog integration
|
|
9
|
+
|
|
10
|
+
This class serves as a blueprint for catalog integration configurations,
|
|
11
|
+
providing details about the catalog type, name, and other optional
|
|
12
|
+
properties necessary for integration. It is designed to be used with
|
|
13
|
+
any implementation that requires a catalog configuration protocol,
|
|
14
|
+
ensuring a standardized structure and attributes are in place.
|
|
15
|
+
|
|
16
|
+
Attributes:
|
|
17
|
+
name (str): the name of the catalog integration in the data platform, e.g. "my_favorite_iceberg_catalog"
|
|
18
|
+
- this is required for dbt to correctly reference catalogs by name from model configuration
|
|
19
|
+
- expected to be unique within the adapter, if not the entire data platform
|
|
20
|
+
catalog_type (str): the type of the catalog integration in the data platform, e.g. "iceberg_rest"
|
|
21
|
+
- this is required for dbt to determine the correct method for parsing user configuration
|
|
22
|
+
- usually a combination of the catalog and the way in which the data platform interacts with it
|
|
23
|
+
table_format (Optional[str]): the table format this catalog uses
|
|
24
|
+
- this is commonly unique to each catalog type, and should only be required from the user for catalogs that support multiple formats
|
|
25
|
+
external_volume (Optional[str]): external storage volume identifier
|
|
26
|
+
- while this is a separate concept from catalogs, we feel it is more user-friendly to group it with the catalog configuration
|
|
27
|
+
- a result of this grouping is that there can only be one external volume per catalog integration, but many catalogs can share the same volume
|
|
28
|
+
adapter_properties (Optional[Dict[str, Any]]):
|
|
29
|
+
- additional, adapter-specific properties are nested here to avoid future collision when expanding the catalog integration protocol
|
|
30
|
+
"""
|
|
31
|
+
|
|
32
|
+
name: str
|
|
33
|
+
catalog_type: str
|
|
34
|
+
table_format: Optional[str]
|
|
35
|
+
external_volume: Optional[str]
|
|
36
|
+
adapter_properties: Optional[Dict[str, Any]]
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
class CatalogIntegrationMode(Enum):
|
|
40
|
+
READ = "r"
|
|
41
|
+
WRITE = "w"
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
class CatalogIntegration:
|
|
45
|
+
"""
|
|
46
|
+
Represent a catalog integration for a given user config
|
|
47
|
+
|
|
48
|
+
This class should be subclassed by specific catalog integration types in an adapter.
|
|
49
|
+
A catalog integration is a specific platform's way of interacting with a specific catalog.
|
|
50
|
+
|
|
51
|
+
Attributes:
|
|
52
|
+
name (str): the name of the catalog integration in the data platform, e.g. "my_favorite_iceberg_catalog"
|
|
53
|
+
- this is required for dbt to correctly reference catalogs by name from model configuration
|
|
54
|
+
- expected to be unique within the adapter, if not the entire data platform
|
|
55
|
+
catalog_type (str): the type of the catalog integration in the data platform, e.g. "iceberg_rest"
|
|
56
|
+
- this is required for dbt to determine the correct method for parsing user configuration
|
|
57
|
+
- usually a combination of the catalog and the way in which the data platform interacts with it
|
|
58
|
+
allows_writes (bool): identifies whether this catalog integration supports writes
|
|
59
|
+
- this is required for dbt to correctly identify whether a catalog is writable during parse time
|
|
60
|
+
- this is determined by the catalog integration type, hence it is a class attribute
|
|
61
|
+
table_format (Optional[str]): the table format this catalog uses
|
|
62
|
+
- this is commonly determined by the catalog integration type, hence it is usually a class attribute
|
|
63
|
+
- it should only be required from the user for catalogs that support multiple formats
|
|
64
|
+
external_volume (Optional[str]): external storage volume identifier
|
|
65
|
+
- while this is a separate concept from catalogs, we feel it is more user-friendly to group it with the catalog configuration
|
|
66
|
+
- a result of this grouping is that there can only be one external volume per catalog integration, but many catalogs can share the same volume
|
|
67
|
+
"""
|
|
68
|
+
|
|
69
|
+
allows_writes: CatalogIntegrationMode = CatalogIntegrationMode.READ
|
|
70
|
+
|
|
71
|
+
def __init__(self, config: CatalogIntegrationConfig):
|
|
72
|
+
self.name: str = config.name
|
|
73
|
+
self.catalog_type: str = config.catalog_type
|
|
74
|
+
self.table_format: Optional[str] = config.table_format or None
|
|
75
|
+
self.external_volume: Optional[str] = config.external_volume or None
|
|
@@ -124,7 +124,7 @@ class LazyHandle:
|
|
|
124
124
|
# and https://github.com/python/mypy/issues/5374
|
|
125
125
|
# for why we have type: ignore. Maybe someday dataclasses + abstract classes
|
|
126
126
|
# will work.
|
|
127
|
-
@dataclass
|
|
127
|
+
@dataclass
|
|
128
128
|
class Credentials(ExtensibleDbtClassMixin, Replaceable, metaclass=abc.ABCMeta):
|
|
129
129
|
database: str
|
|
130
130
|
schema: str
|