dbt-adapters 1.14.1__py3-none-any.whl → 1.14.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

dbt/adapters/__about__.py CHANGED
@@ -1 +1 @@
-version = "1.14.1"
+version = "1.14.4"
dbt/adapters/base/impl.py CHANGED
@@ -24,6 +24,13 @@ from typing import (
     TYPE_CHECKING,
 )
 import pytz
+
+from dbt.adapters.record.base import (
+    AdapterExecuteRecord,
+    AdapterGetPartitionsMetadataRecord,
+    AdapterConvertTypeRecord,
+    AdapterStandardizeGrantsDictRecord,
+)
 from dbt_common.behavior_flags import Behavior, BehaviorFlag
 from dbt_common.clients.jinja import CallableMacroGenerator
 from dbt_common.contracts.constraints import (
@@ -42,6 +49,7 @@ from dbt_common.exceptions import (
     NotImplementedError,
     UnexpectedNullError,
 )
+from dbt_common.record import auto_record_function, record_function, supports_replay
 from dbt_common.utils import (
     AttrDict,
     cast_to_str,
@@ -65,6 +73,12 @@ from dbt.adapters.base.relation import (
 )
 from dbt.adapters.cache import RelationsCache, _make_ref_key_dict
 from dbt.adapters.capability import Capability, CapabilityDict
+from dbt.adapters.catalogs import (
+    CatalogIntegration,
+    CatalogIntegrationClient,
+    CatalogIntegrationConfig,
+    CatalogRelation,
+)
 from dbt.adapters.contracts.connection import Credentials
 from dbt.adapters.contracts.macros import MacroResolverProtocol
 from dbt.adapters.contracts.relation import RelationConfig
@@ -220,6 +234,7 @@ class SnapshotStrategy(TypedDict):
     hard_deletes: Optional[str]


+@supports_replay
 class BaseAdapter(metaclass=AdapterMeta):
     """The BaseAdapter provides an abstract base class for adapters.

@@ -269,6 +284,7 @@ class BaseAdapter(metaclass=AdapterMeta):
     Relation: Type[BaseRelation] = BaseRelation
     Column: Type[BaseColumn] = BaseColumn
     ConnectionManager: Type[BaseConnectionManager]
+    CATALOG_INTEGRATIONS: Iterable[Type[CatalogIntegration]] = []

     # A set of clobber config fields accepted by this adapter
     # for use in materializations
@@ -295,6 +311,21 @@ class BaseAdapter(metaclass=AdapterMeta):
         self._macro_resolver: Optional[MacroResolverProtocol] = None
         self._macro_context_generator: Optional[MacroContextGeneratorCallable] = None
         self.behavior = DEFAULT_BASE_BEHAVIOR_FLAGS  # type: ignore
+        self._catalog_client = CatalogIntegrationClient(self.CATALOG_INTEGRATIONS)
+
+    def add_catalog_integration(
+        self, catalog_integration: CatalogIntegrationConfig
+    ) -> CatalogIntegration:
+        return self._catalog_client.add(catalog_integration)
+
+    @available
+    def get_catalog_integration(self, name: str) -> CatalogIntegration:
+        return self._catalog_client.get(name)
+
+    @available
+    def build_catalog_relation(self, config: RelationConfig) -> CatalogRelation:
+        catalog = self.get_catalog_integration(config.catalog)
+        return catalog.build_relation(config)

     ###
     # Methods to set / access a macro resolver
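Taken together, these hooks let a concrete adapter declare its supported catalog integrations as a class attribute; BaseAdapter wires them into a CatalogIntegrationClient at construction time and exposes lookup and relation-building to the Jinja context via @available. A minimal sketch of that wiring, using hypothetical names (ExampleCatalog, ExampleAdapter) that are not part of this package:

    from dbt.adapters.base import BaseAdapter
    from dbt.adapters.catalogs import CatalogIntegration

    class ExampleCatalog(CatalogIntegration):
        # catalog_type keys the factory lookup inside CatalogIntegrationClient
        catalog_type = "example"

    class ExampleAdapter(BaseAdapter):
        # BaseAdapter.__init__ passes this list to CatalogIntegrationClient, so
        # add_catalog_integration / get_catalog_integration work per instance.
        # (ConnectionManager and the abstract methods are omitted for brevity.)
        CATALOG_INTEGRATIONS = [ExampleCatalog]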
@@ -383,6 +414,9 @@ class BaseAdapter(metaclass=AdapterMeta):
         self.connections.query_header.reset()

     @available.parse(_parse_callback_empty_table)
+    @record_function(
+        AdapterExecuteRecord, method=True, index_on_thread_id=True, id_field_name="thread_id"
+    )
     def execute(
         self,
         sql: str,
@@ -414,6 +448,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         """
         raise NotImplementedError("`validate_sql` is not implemented for this adapter!")

+    @auto_record_function("AdapterGetColumnSchemaFromQuery", group="Available")
     @available.parse(lambda *a, **k: [])
     def get_column_schema_from_query(self, sql: str) -> List[BaseColumn]:
         """Get a list of the Columns with names and data types from the given sql."""
@@ -427,6 +462,12 @@ class BaseAdapter(metaclass=AdapterMeta):
         ]
         return columns

+    @record_function(
+        AdapterGetPartitionsMetadataRecord,
+        method=True,
+        index_on_thread_id=True,
+        id_field_name="thread_id",
+    )
     @available.parse(_parse_callback_empty_table)
     def get_partitions_metadata(self, table: str) -> Tuple["agate.Table"]:
         """
@@ -576,6 +617,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         self.cache.clear()
         self._relations_cache_for_schemas(relation_configs, required_schemas)

+    @auto_record_function("AdapterCacheAdded", group="Available")
     @available
     def cache_added(self, relation: Optional[BaseRelation]) -> str:
         """Cache a new relation in dbt. It will show up in `list relations`."""
@@ -586,6 +628,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         # so jinja doesn't render things
         return ""

+    @auto_record_function("AdapterCacheDropped", group="Available")
     @available
     def cache_dropped(self, relation: Optional[BaseRelation]) -> str:
         """Drop a relation in dbt. It will no longer show up in
@@ -597,6 +640,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         self.cache.drop(relation)
         return ""

+    @auto_record_function("AdapterCacheRenamed", group="Available")
     @available
     def cache_renamed(
         self,
@@ -637,6 +681,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         """Get a list of existing schemas in database"""
         raise NotImplementedError("`list_schemas` is not implemented for this adapter!")

+    @auto_record_function("AdapterCheckSchemaExists", group="Available")
     @available.parse(lambda *a, **k: False)
     def check_schema_exists(self, database: str, schema: str) -> bool:
         """Check if a schema exists.
@@ -651,6 +696,7 @@ class BaseAdapter(metaclass=AdapterMeta):
     ###
     # Abstract methods about relations
     ###
+    @auto_record_function("AdapterDropRelation", group="Available")
     @abc.abstractmethod
     @available.parse_none
     def drop_relation(self, relation: BaseRelation) -> None:
@@ -660,12 +706,14 @@ class BaseAdapter(metaclass=AdapterMeta):
         """
         raise NotImplementedError("`drop_relation` is not implemented for this adapter!")

+    @auto_record_function("AdapterTruncateRelation", group="Available")
     @abc.abstractmethod
     @available.parse_none
     def truncate_relation(self, relation: BaseRelation) -> None:
         """Truncate the given relation."""
         raise NotImplementedError("`truncate_relation` is not implemented for this adapter!")

+    @auto_record_function("AdapterRenameRelation", group="Available")
     @abc.abstractmethod
     @available.parse_none
     def rename_relation(self, from_relation: BaseRelation, to_relation: BaseRelation) -> None:
@@ -675,6 +723,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         """
         raise NotImplementedError("`rename_relation` is not implemented for this adapter!")

+    @auto_record_function("AdapterGetColumnsInRelation", group="Available")
     @abc.abstractmethod
     @available.parse_list
     def get_columns_in_relation(self, relation: BaseRelation) -> List[BaseColumn]:
@@ -687,6 +736,7 @@ class BaseAdapter(metaclass=AdapterMeta):
             "`get_catalog_for_single_relation` is not implemented for this adapter!"
         )

+    @auto_record_function("AdapterGetColumnsInTable", group="Available")
     @available.deprecated("get_columns_in_relation", lambda *a, **k: [])
     def get_columns_in_table(self, schema: str, identifier: str) -> List[BaseColumn]:
         """DEPRECATED: Get a list of the columns in the given table."""
@@ -729,6 +779,12 @@ class BaseAdapter(metaclass=AdapterMeta):
     ###
     # Methods about grants
     ###
+    @record_function(
+        AdapterStandardizeGrantsDictRecord,
+        method=True,
+        index_on_thread_id=True,
+        id_field_name="thread_id",
+    )
     @available
     def standardize_grants_dict(self, grants_table: "agate.Table") -> dict:
         """Translate the result of `show grants` (or equivalent) to match the
@@ -743,6 +799,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         :return: A standardized dictionary matching the `grants` config
         :rtype: dict
         """
+
         grants_dict: Dict[str, List[str]] = {}
         for row in grants_table:
             grantee = row["grantee"]
@@ -756,6 +813,7 @@ class BaseAdapter(metaclass=AdapterMeta):
     ###
     # Provided methods about relations
     ###
+    @auto_record_function("AdapterGetMissingColumns", group="Available")
     @available.parse_list
     def get_missing_columns(
         self, from_relation: BaseRelation, to_relation: BaseRelation
@@ -787,6 +845,7 @@ class BaseAdapter(metaclass=AdapterMeta):

         return [col for (col_name, col) in from_columns.items() if col_name in missing_columns]

+    @auto_record_function("AdapterValidSnapshotTarget", group="Available")
     @available.parse_none
     def valid_snapshot_target(
         self, relation: BaseRelation, column_names: Optional[Dict[str, str]] = None
@@ -819,10 +878,12 @@ class BaseAdapter(metaclass=AdapterMeta):
         if missing:
             raise SnapshotTargetNotSnapshotTableError(missing)

+    @auto_record_function("AdapterAssertValidSnapshotTargetGivenStrategy", group="Available")
     @available.parse_none
     def assert_valid_snapshot_target_given_strategy(
         self, relation: BaseRelation, column_names: Dict[str, str], strategy: SnapshotStrategy
     ) -> None:
+
         # Assert everything we can with the legacy function.
         self.valid_snapshot_target(relation, column_names)

@@ -841,10 +902,12 @@ class BaseAdapter(metaclass=AdapterMeta):
         if missing:
             raise SnapshotTargetNotSnapshotTableError(missing)

+    @auto_record_function("AdapterExpandTargetColumnTypes", group="Available")
     @available.parse_none
     def expand_target_column_types(
         self, from_relation: BaseRelation, to_relation: BaseRelation
     ) -> None:
+
         if not isinstance(from_relation, self.Relation):
             raise MacroArgTypeError(
                 method_name="expand_target_column_types",
@@ -935,8 +998,10 @@ class BaseAdapter(metaclass=AdapterMeta):

         return matches

+    @auto_record_function("AdapterGetRelation", group="Available")
     @available.parse_none
     def get_relation(self, database: str, schema: str, identifier: str) -> Optional[BaseRelation]:
+
         relations_list = self.list_relations(database, schema)

         matches = self._make_match(relations_list, database, schema, identifier)
@@ -954,9 +1019,11 @@ class BaseAdapter(metaclass=AdapterMeta):

         return None

+    @auto_record_function("AdapterAlreadyExists", group="Available")
     @available.deprecated("get_relation", lambda *a, **k: False)
     def already_exists(self, schema: str, name: str) -> bool:
         """DEPRECATED: Return if a model already exists in the database"""
+
         database = self.config.credentials.database
         relation = self.get_relation(database, schema, name)
         return relation is not None
@@ -965,12 +1032,14 @@ class BaseAdapter(metaclass=AdapterMeta):
     # ODBC FUNCTIONS -- these should not need to change for every adapter,
     # although some adapters may override them
     ###
+    @auto_record_function("AdapterCreateSchema", group="Available")
     @abc.abstractmethod
     @available.parse_none
     def create_schema(self, relation: BaseRelation):
         """Create the given schema if it does not exist."""
         raise NotImplementedError("`create_schema` is not implemented for this adapter!")

+    @auto_record_function("AdapterDropSchema", group="Available")
     @abc.abstractmethod
     @available.parse_none
     def drop_schema(self, relation: BaseRelation):
@@ -979,11 +1048,13 @@ class BaseAdapter(metaclass=AdapterMeta):

     @available
     @classmethod
+    @auto_record_function("AdapterQuote", group="Available")
     @abc.abstractmethod
     def quote(cls, identifier: str) -> str:
         """Quote the given identifier, as appropriate for the database."""
         raise NotImplementedError("`quote` is not implemented for this adapter!")

+    @auto_record_function("AdapterQuoteAsConfigured", group="Available")
     @available
     def quote_as_configured(self, identifier: str, quote_key: str) -> str:
         """Quote or do not quote the given identifer as configured in the
@@ -992,6 +1063,7 @@ class BaseAdapter(metaclass=AdapterMeta):
         The quote key should be one of 'database' (on bigquery, 'profile'),
         'identifier', or 'schema', or it will be treated as if you set `True`.
         """
+
         try:
             key = ComponentName(quote_key)
         except ValueError:
@@ -1003,8 +1075,10 @@ class BaseAdapter(metaclass=AdapterMeta):
         else:
             return identifier

+    @auto_record_function("AdapterQuoteSeedColumn", group="Available")
     @available
     def quote_seed_column(self, column: str, quote_config: Optional[bool]) -> str:
+
         quote_columns: bool = True
         if isinstance(quote_config, bool):
             quote_columns = quote_config
@@ -1107,7 +1181,11 @@ class BaseAdapter(metaclass=AdapterMeta):

     @available
     @classmethod
+    @record_function(
+        AdapterConvertTypeRecord, method=True, index_on_thread_id=True, id_field_name="thread_id"
+    )
     def convert_type(cls, agate_table: "agate.Table", col_idx: int) -> Optional[str]:
+
         return cls.convert_agate_type(agate_table, col_idx)

     @classmethod
@@ -1709,7 +1787,9 @@ class BaseAdapter(metaclass=AdapterMeta):

     @available
     @classmethod
-    def render_raw_columns_constraints(cls, raw_columns: Dict[str, Dict[str, Any]]) -> List:
+    @auto_record_function("AdapterRenderRawColumnConstraints", group="Available")
+    def render_raw_columns_constraints(cls, raw_columns: Dict[str, Dict[str, Any]]) -> List[str]:
+
         rendered_column_constraints = []

         for v in raw_columns.values():
@@ -1763,7 +1843,9 @@ class BaseAdapter(metaclass=AdapterMeta):

     @available
     @classmethod
+    @auto_record_function("AdapterRenderRawModelConstraints", group="Available")
     def render_raw_model_constraints(cls, raw_constraints: List[Dict[str, Any]]) -> List[str]:
+
         return [c for c in map(cls.render_raw_model_constraint, raw_constraints) if c is not None]

     @classmethod
@@ -1835,7 +1917,7 @@ class BaseAdapter(metaclass=AdapterMeta):

     @available.parse_none
     @classmethod
-    def get_hard_deletes_behavior(cls, config):
+    def get_hard_deletes_behavior(cls, config: Dict[str, str]) -> str:
         """Check the hard_deletes config enum, and the legacy invalidate_hard_deletes
         config flag in order to determine which behavior should be used for deleted
         records in a snapshot. The default is to ignore them."""
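Beyond catalogs, this file's other theme is record/replay instrumentation from dbt_common.record: @supports_replay marks the class, @auto_record_function wraps most @available methods under the "Available" group, and @record_function is used where a dedicated record type (such as AdapterExecuteRecord) indexes calls by thread id. As a hedged sketch only — the capture/replay semantics live in dbt_common.record, and ExampleQueryLayer is a made-up class — the same decorators would apply to any method like so:

    from dbt_common.record import auto_record_function, supports_replay

    @supports_replay
    class ExampleQueryLayer:
        @auto_record_function("ExampleListSchemas", group="Available")
        def list_schemas(self, database: str) -> list:
            # With recording enabled, the decorator captures this call's
            # parameters and result so a replay run can serve them back without
            # a live warehouse (an assumption based on the decorator names).
            return ["analytics", "staging"]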
@@ -60,6 +60,7 @@ class BaseRelation(FakeAPIObject, Hashable):
     require_alias: bool = (
         True  # used to govern whether to add an alias when render_limited is called
     )
+    catalog: Optional[str] = None

     # register relation types that can be renamed for the purpose of replacing relations using stages and backups
     # adding a relation type here also requires defining the associated rename macro
@@ -305,6 +306,13 @@ class BaseRelation(FakeAPIObject, Hashable):

         config_quoting = relation_config.quoting_dict
         config_quoting.pop("column", None)
+
+        catalog_name = (
+            relation_config.catalog_name
+            if hasattr(relation_config, "catalog_name")
+            else relation_config.config.get("catalog", None)  # type: ignore
+        )
+
         # precedence: kwargs quoting > relation config quoting > base quoting > default quoting
         quote_policy = deep_merge(
             cls.get_default_quote_policy().to_dict(omit_none=True),
@@ -318,6 +326,7 @@ class BaseRelation(FakeAPIObject, Hashable):
             schema=relation_config.schema,
             identifier=relation_config.identifier,
             quote_policy=quote_policy,
+            catalog_name=catalog_name,
             **kwargs,
         )

dbt/adapters/catalogs/__init__.py ADDED
@@ -0,0 +1,11 @@
+from dbt.adapters.catalogs._client import CatalogIntegrationClient
+from dbt.adapters.catalogs._exceptions import (
+    DbtCatalogIntegrationAlreadyExistsError,
+    DbtCatalogIntegrationNotFoundError,
+    DbtCatalogIntegrationNotSupportedError,
+)
+from dbt.adapters.catalogs._integration import (
+    CatalogIntegration,
+    CatalogIntegrationConfig,
+    CatalogRelation,
+)
dbt/adapters/catalogs/_client.py ADDED
@@ -0,0 +1,54 @@
+from typing import Dict, Iterable, Type
+
+from dbt.adapters.catalogs._exceptions import (
+    DbtCatalogIntegrationAlreadyExistsError,
+    DbtCatalogIntegrationNotFoundError,
+    DbtCatalogIntegrationNotSupportedError,
+)
+from dbt.adapters.catalogs._integration import (
+    CatalogIntegration,
+    CatalogIntegrationConfig,
+)
+
+
+class CatalogIntegrationClient:
+    """
+    A repository class that manages catalog integrations
+
+    This class manages all types of catalog integrations,
+    supporting operations like registering new integrations and retrieving existing ones.
+    There is only one instance of this class per adapter.
+
+    Attributes:
+        __supported_catalogs (Dict[str, Type[CatalogIntegration]]): a dictionary of supported
+            catalog types mapped to their corresponding factory classes
+        __catalog_integrations (Dict[str, CatalogIntegration]): a dictionary of catalog
+            integration names mapped to their instances
+    """
+
+    def __init__(self, supported_catalogs: Iterable[Type[CatalogIntegration]]):
+        self.__supported_catalogs: Dict[str, Type[CatalogIntegration]] = {
+            catalog.catalog_type: catalog for catalog in supported_catalogs
+        }
+        self.__catalog_integrations: Dict[str, CatalogIntegration] = {}
+
+    def add(self, config: CatalogIntegrationConfig) -> CatalogIntegration:
+        factory = self.__catalog_integration_factory(config.catalog_type)
+        if config.name in self.__catalog_integrations:
+            raise DbtCatalogIntegrationAlreadyExistsError(config.name)
+        self.__catalog_integrations[config.name] = factory(config)
+        return self.get(config.name)
+
+    def get(self, name: str) -> CatalogIntegration:
+        try:
+            return self.__catalog_integrations[name]
+        except KeyError:
+            raise DbtCatalogIntegrationNotFoundError(name, self.__catalog_integrations.keys())
+
+    def __catalog_integration_factory(self, catalog_type: str) -> Type[CatalogIntegration]:
+        try:
+            return self.__supported_catalogs[catalog_type]
+        except KeyError as e:
+            raise DbtCatalogIntegrationNotSupportedError(
+                catalog_type, self.__supported_catalogs.keys()
+            ) from e
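A short usage sketch of the client API added here; DemoCatalog and DemoConfig are hypothetical stand-ins that satisfy the CatalogIntegration base class and the CatalogIntegrationConfig protocol:

    from dataclasses import dataclass, field
    from typing import Any, Dict, Optional

    from dbt.adapters.catalogs import CatalogIntegration, CatalogIntegrationClient

    class DemoCatalog(CatalogIntegration):
        catalog_type = "demo"

    @dataclass
    class DemoConfig:
        name: str = "my_demo_catalog"
        catalog_type: str = "demo"
        catalog_name: Optional[str] = None
        table_format: Optional[str] = None
        external_volume: Optional[str] = None
        adapter_properties: Dict[str, Any] = field(default_factory=dict)

    client = CatalogIntegrationClient([DemoCatalog])
    integration = client.add(DemoConfig())  # registers "my_demo_catalog" and returns it
    assert client.get("my_demo_catalog") is integration
    # client.add(DemoConfig())  -> raises DbtCatalogIntegrationAlreadyExistsError
    # client.get("missing")     -> raises DbtCatalogIntegrationNotFoundError
    # an unknown catalog_type   -> raises DbtCatalogIntegrationNotSupportedError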
dbt/adapters/catalogs/_exceptions.py ADDED
@@ -0,0 +1,32 @@
+from typing import Iterable
+
+from dbt_common.exceptions import DbtConfigError
+
+
+class DbtCatalogIntegrationAlreadyExistsError(DbtConfigError):
+    def __init__(self, catalog_name: str) -> None:
+        self.catalog_name = catalog_name
+        msg = f"Catalog already exists: {self.catalog_name}."
+        super().__init__(msg)
+
+
+class DbtCatalogIntegrationNotFoundError(DbtConfigError):
+    def __init__(self, catalog_name: str, existing_catalog_names: Iterable[str]) -> None:
+        self.catalog_name = catalog_name
+        msg = (
+            f"Catalog not found."
+            f"Received: {self.catalog_name}"
+            f"Expected one of: {', '.join(existing_catalog_names)}?"
+        )
+        super().__init__(msg)
+
+
+class DbtCatalogIntegrationNotSupportedError(DbtConfigError):
+    def __init__(self, catalog_type: str, supported_catalog_types: Iterable[str]) -> None:
+        self.catalog_type = catalog_type
+        msg = (
+            f"Catalog type is not supported."
+            f"Received: {catalog_type}"
+            f"Expected one of: {', '.join(supported_catalog_types)}"
+        )
+        super().__init__(msg)
dbt/adapters/catalogs/_integration.py ADDED
@@ -0,0 +1,110 @@
+import abc
+from typing import Any, Dict, Optional
+from typing_extensions import Protocol
+
+from dbt.adapters.contracts.relation import RelationConfig
+
+
+class CatalogIntegrationConfig(Protocol):
+    """
+    Represents the user configuration required to describe a catalog integration
+
+    This class serves as a blueprint for catalog integration configurations,
+    providing details about the catalog type, name, and other optional
+    properties necessary for integration. It is designed to be used with
+    any implementation that requires a catalog configuration protocol,
+    ensuring a standardized structure and attributes are in place.
+
+    Attributes:
+        name (str): the name of the catalog integration in the dbt project, e.g. "my_iceberg_operational_data"
+            - a unique name for this catalog integration to be referenced in a model configuration
+        catalog_type (str): the type of the catalog integration in the data platform, e.g. "iceberg_rest"
+            - this is required for dbt to determine the correct method for parsing user configuration
+            - usually a combination of the catalog and the way in which the data platform interacts with it
+        catalog_name (Optional[str]): the name of the catalog integration in the data platform, e.g. "my_favorite_iceberg_catalog"
+            - this is required for dbt to correctly reference catalogs by name from model configuration
+            - expected to be unique within the data platform, but many dbt catalog integrations can share the same catalog name
+        table_format (Optional[str]): the table format this catalog uses
+            - this is commonly unique to each catalog type, and should only be required from the user for catalogs that support multiple formats
+        external_volume (Optional[str]): external storage volume identifier
+            - while this is a separate concept from catalogs, we feel it is more user-friendly to group it with the catalog configuration
+            - it's possible to use a default external volume at the user, database, or account level, hence this is optional
+            - a result of this grouping is that there can only be one external volume per catalog integration, but many catalogs can share the same volume
+            - a user should create a new dbt catalog if they want to use a different external volume for a given catalog integration
+        adapter_properties (Optional[Dict[str, Any]]):
+            - additional, adapter-specific properties are nested here to avoid future collision when expanding the catalog integration protocol
+    """
+
+    name: str
+    catalog_type: str
+    catalog_name: Optional[str]
+    table_format: Optional[str]
+    external_volume: Optional[str]
+    adapter_properties: Dict[str, Any]
+
+
+class CatalogRelation(Protocol):
+    catalog_name: Optional[str]
+    table_format: Optional[str]
+    external_volume: Optional[str]
+
+
+class CatalogIntegration(abc.ABC):
+    """
+    Represent a catalog integration for a given user config
+
+    This class should be implemented by specific catalog integration types in an adapter.
+    A catalog integration is a specific platform's way of interacting with a specific catalog.
+
+    Attributes:
+        name (str): the name of the catalog integration in the dbt project, e.g. "my_iceberg_operational_data"
+            - a unique name for this catalog integration to be referenced in a model configuration
+        catalog_type (str): the type of the catalog integration in the data platform, e.g. "iceberg_rest"
+            - this is a name for this particular implementation of the catalog integration, hence it is a class attribute
+        catalog_name (Optional[str]): the name of the catalog integration in the data platform, e.g. "my_favorite_iceberg_catalog"
+            - this is required for dbt to correctly reference catalogs by name from model configuration
+            - expected to be unique within the data platform, but many dbt catalog integrations can share the same catalog name
+        table_format (Optional[str]): the table format this catalog uses
+            - this is commonly unique to each catalog type, and should only be required from the user for catalogs that support multiple formats
+        external_volume (Optional[str]): external storage volume identifier
+            - while this is a separate concept from catalogs, we feel it is more user-friendly to group it with the catalog configuration
+            - it's possible to use a default external volume at the user, database, or account level, hence this is optional
+            - a result of this grouping is that there can only be one external volume per catalog integration, but many catalogs can share the same volume
+            - a user should create a new dbt catalog if they want to use a different external volume for a given catalog integration
+        allows_writes (bool): identifies whether this catalog integration supports writes
+            - this is required for dbt to correctly identify whether a catalog is writable during parse time
+            - this is determined by the catalog integration type, hence it is a class attribute
+    """
+
+    catalog_type: str
+    table_format: Optional[str] = None
+    allows_writes: bool = False
+
+    def __init__(self, config: CatalogIntegrationConfig) -> None:
+        # table_format is often fixed for a catalog type, allow it to be defined at the class level
+        if config.table_format is not None:
+            self.table_format = config.table_format
+
+        self.name: str = config.name
+        self.catalog_name: Optional[str] = config.catalog_name
+        self.external_volume: Optional[str] = config.external_volume
+
+    def build_relation(self, config: RelationConfig) -> CatalogRelation:
+        """
+        Builds relation configuration within the context of this catalog integration.
+
+        This method is a placeholder and must be implemented in subclasses to provide
+        custom logic for building a relation.
+
+        Args:
+            config: User-provided model configuration.
+
+        Returns:
+            A `CatalogRelation` object constructed based on the input configuration.
+
+        Raises:
+            NotImplementedError: Raised when this method is not implemented in a subclass.
+        """
+        raise NotImplementedError(
+            f"`{self.__class__.__name__}.build_relation` must be implemented to use this feature"
+        )
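Because build_relation raises NotImplementedError by default, an adapter's integration class must override it before BaseAdapter.build_catalog_relation is usable. A rough sketch of a concrete subclass, with illustrative names (DemoRelation, DemoIcebergRest) and the simplifying assumption that all settings come from the integration itself rather than the model config:

    from dataclasses import dataclass
    from typing import Optional

    from dbt.adapters.catalogs import CatalogIntegration
    from dbt.adapters.contracts.relation import RelationConfig

    @dataclass
    class DemoRelation:
        # Structurally satisfies the CatalogRelation protocol.
        catalog_name: Optional[str]
        table_format: Optional[str]
        external_volume: Optional[str]

    class DemoIcebergRest(CatalogIntegration):
        catalog_type = "iceberg_rest"
        table_format = "iceberg"  # fixed at the class level for this catalog type
        allows_writes = True

        def build_relation(self, config: RelationConfig) -> DemoRelation:
            # Combine integration-level settings into a relation; a real
            # implementation would also consult the model's configuration.
            return DemoRelation(
                catalog_name=self.catalog_name,
                table_format=self.table_format,
                external_volume=self.external_volume,
            )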
dbt/adapters/contracts/relation.py CHANGED
@@ -58,6 +58,7 @@ class RelationConfig(Protocol):
     tags: List[str]
     quoting_dict: Dict[str, bool]
     config: Optional[MaterializationConfig]
+    catalog: Optional[str]


 class ComponentName(StrEnum):
dbt/adapters/events/adapter_types.proto CHANGED
@@ -39,6 +39,7 @@ message AdapterNodeInfo {
   string node_finished_at = 8;
   google.protobuf.Struct meta = 9;
   AdapterNodeRelation node_relation = 10;
+  string node_checksum = 11;
 }

 // ReferenceKey