dbt-adapters 1.22.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (173) hide show
  1. dbt/adapters/__about__.py +1 -0
  2. dbt/adapters/__init__.py +8 -0
  3. dbt/adapters/base/README.md +13 -0
  4. dbt/adapters/base/__init__.py +16 -0
  5. dbt/adapters/base/column.py +173 -0
  6. dbt/adapters/base/connections.py +429 -0
  7. dbt/adapters/base/impl.py +2036 -0
  8. dbt/adapters/base/meta.py +150 -0
  9. dbt/adapters/base/plugin.py +32 -0
  10. dbt/adapters/base/query_headers.py +106 -0
  11. dbt/adapters/base/relation.py +648 -0
  12. dbt/adapters/cache.py +521 -0
  13. dbt/adapters/capability.py +63 -0
  14. dbt/adapters/catalogs/__init__.py +14 -0
  15. dbt/adapters/catalogs/_client.py +54 -0
  16. dbt/adapters/catalogs/_constants.py +1 -0
  17. dbt/adapters/catalogs/_exceptions.py +39 -0
  18. dbt/adapters/catalogs/_integration.py +113 -0
  19. dbt/adapters/clients/__init__.py +0 -0
  20. dbt/adapters/clients/jinja.py +24 -0
  21. dbt/adapters/contracts/__init__.py +0 -0
  22. dbt/adapters/contracts/connection.py +229 -0
  23. dbt/adapters/contracts/macros.py +11 -0
  24. dbt/adapters/contracts/relation.py +160 -0
  25. dbt/adapters/events/README.md +51 -0
  26. dbt/adapters/events/__init__.py +0 -0
  27. dbt/adapters/events/adapter_types_pb2.py +2 -0
  28. dbt/adapters/events/base_types.py +36 -0
  29. dbt/adapters/events/logging.py +83 -0
  30. dbt/adapters/events/types.py +436 -0
  31. dbt/adapters/exceptions/__init__.py +40 -0
  32. dbt/adapters/exceptions/alias.py +24 -0
  33. dbt/adapters/exceptions/cache.py +68 -0
  34. dbt/adapters/exceptions/compilation.py +269 -0
  35. dbt/adapters/exceptions/connection.py +16 -0
  36. dbt/adapters/exceptions/database.py +51 -0
  37. dbt/adapters/factory.py +264 -0
  38. dbt/adapters/protocol.py +150 -0
  39. dbt/adapters/py.typed +0 -0
  40. dbt/adapters/record/__init__.py +2 -0
  41. dbt/adapters/record/base.py +291 -0
  42. dbt/adapters/record/cursor/cursor.py +69 -0
  43. dbt/adapters/record/cursor/description.py +37 -0
  44. dbt/adapters/record/cursor/execute.py +39 -0
  45. dbt/adapters/record/cursor/fetchall.py +69 -0
  46. dbt/adapters/record/cursor/fetchmany.py +23 -0
  47. dbt/adapters/record/cursor/fetchone.py +23 -0
  48. dbt/adapters/record/cursor/rowcount.py +23 -0
  49. dbt/adapters/record/handle.py +55 -0
  50. dbt/adapters/record/serialization.py +115 -0
  51. dbt/adapters/reference_keys.py +39 -0
  52. dbt/adapters/relation_configs/README.md +25 -0
  53. dbt/adapters/relation_configs/__init__.py +12 -0
  54. dbt/adapters/relation_configs/config_base.py +46 -0
  55. dbt/adapters/relation_configs/config_change.py +26 -0
  56. dbt/adapters/relation_configs/config_validation.py +57 -0
  57. dbt/adapters/sql/__init__.py +2 -0
  58. dbt/adapters/sql/connections.py +263 -0
  59. dbt/adapters/sql/impl.py +286 -0
  60. dbt/adapters/utils.py +69 -0
  61. dbt/include/__init__.py +3 -0
  62. dbt/include/global_project/__init__.py +4 -0
  63. dbt/include/global_project/dbt_project.yml +7 -0
  64. dbt/include/global_project/docs/overview.md +43 -0
  65. dbt/include/global_project/macros/adapters/apply_grants.sql +167 -0
  66. dbt/include/global_project/macros/adapters/columns.sql +144 -0
  67. dbt/include/global_project/macros/adapters/freshness.sql +32 -0
  68. dbt/include/global_project/macros/adapters/indexes.sql +41 -0
  69. dbt/include/global_project/macros/adapters/metadata.sql +105 -0
  70. dbt/include/global_project/macros/adapters/persist_docs.sql +33 -0
  71. dbt/include/global_project/macros/adapters/relation.sql +84 -0
  72. dbt/include/global_project/macros/adapters/schema.sql +20 -0
  73. dbt/include/global_project/macros/adapters/show.sql +26 -0
  74. dbt/include/global_project/macros/adapters/timestamps.sql +52 -0
  75. dbt/include/global_project/macros/adapters/validate_sql.sql +10 -0
  76. dbt/include/global_project/macros/etc/datetime.sql +62 -0
  77. dbt/include/global_project/macros/etc/statement.sql +52 -0
  78. dbt/include/global_project/macros/generic_test_sql/accepted_values.sql +27 -0
  79. dbt/include/global_project/macros/generic_test_sql/not_null.sql +9 -0
  80. dbt/include/global_project/macros/generic_test_sql/relationships.sql +23 -0
  81. dbt/include/global_project/macros/generic_test_sql/unique.sql +12 -0
  82. dbt/include/global_project/macros/get_custom_name/get_custom_alias.sql +36 -0
  83. dbt/include/global_project/macros/get_custom_name/get_custom_database.sql +32 -0
  84. dbt/include/global_project/macros/get_custom_name/get_custom_schema.sql +60 -0
  85. dbt/include/global_project/macros/materializations/configs.sql +21 -0
  86. dbt/include/global_project/macros/materializations/functions/aggregate.sql +65 -0
  87. dbt/include/global_project/macros/materializations/functions/function.sql +20 -0
  88. dbt/include/global_project/macros/materializations/functions/helpers.sql +20 -0
  89. dbt/include/global_project/macros/materializations/functions/scalar.sql +69 -0
  90. dbt/include/global_project/macros/materializations/hooks.sql +35 -0
  91. dbt/include/global_project/macros/materializations/models/clone/can_clone_table.sql +7 -0
  92. dbt/include/global_project/macros/materializations/models/clone/clone.sql +67 -0
  93. dbt/include/global_project/macros/materializations/models/clone/create_or_replace_clone.sql +7 -0
  94. dbt/include/global_project/macros/materializations/models/incremental/column_helpers.sql +80 -0
  95. dbt/include/global_project/macros/materializations/models/incremental/incremental.sql +99 -0
  96. dbt/include/global_project/macros/materializations/models/incremental/is_incremental.sql +13 -0
  97. dbt/include/global_project/macros/materializations/models/incremental/merge.sql +120 -0
  98. dbt/include/global_project/macros/materializations/models/incremental/on_schema_change.sql +159 -0
  99. dbt/include/global_project/macros/materializations/models/incremental/strategies.sql +92 -0
  100. dbt/include/global_project/macros/materializations/models/materialized_view.sql +121 -0
  101. dbt/include/global_project/macros/materializations/models/table.sql +64 -0
  102. dbt/include/global_project/macros/materializations/models/view.sql +72 -0
  103. dbt/include/global_project/macros/materializations/seeds/helpers.sql +128 -0
  104. dbt/include/global_project/macros/materializations/seeds/seed.sql +60 -0
  105. dbt/include/global_project/macros/materializations/snapshots/helpers.sql +345 -0
  106. dbt/include/global_project/macros/materializations/snapshots/snapshot.sql +109 -0
  107. dbt/include/global_project/macros/materializations/snapshots/snapshot_merge.sql +34 -0
  108. dbt/include/global_project/macros/materializations/snapshots/strategies.sql +184 -0
  109. dbt/include/global_project/macros/materializations/tests/helpers.sql +44 -0
  110. dbt/include/global_project/macros/materializations/tests/test.sql +66 -0
  111. dbt/include/global_project/macros/materializations/tests/unit.sql +40 -0
  112. dbt/include/global_project/macros/materializations/tests/where_subquery.sql +15 -0
  113. dbt/include/global_project/macros/python_model/python.sql +114 -0
  114. dbt/include/global_project/macros/relations/column/columns_spec_ddl.sql +89 -0
  115. dbt/include/global_project/macros/relations/create.sql +23 -0
  116. dbt/include/global_project/macros/relations/create_backup.sql +17 -0
  117. dbt/include/global_project/macros/relations/create_intermediate.sql +17 -0
  118. dbt/include/global_project/macros/relations/drop.sql +41 -0
  119. dbt/include/global_project/macros/relations/drop_backup.sql +14 -0
  120. dbt/include/global_project/macros/relations/materialized_view/alter.sql +55 -0
  121. dbt/include/global_project/macros/relations/materialized_view/create.sql +10 -0
  122. dbt/include/global_project/macros/relations/materialized_view/drop.sql +14 -0
  123. dbt/include/global_project/macros/relations/materialized_view/refresh.sql +9 -0
  124. dbt/include/global_project/macros/relations/materialized_view/rename.sql +10 -0
  125. dbt/include/global_project/macros/relations/materialized_view/replace.sql +10 -0
  126. dbt/include/global_project/macros/relations/rename.sql +35 -0
  127. dbt/include/global_project/macros/relations/rename_intermediate.sql +14 -0
  128. dbt/include/global_project/macros/relations/replace.sql +50 -0
  129. dbt/include/global_project/macros/relations/schema.sql +8 -0
  130. dbt/include/global_project/macros/relations/table/create.sql +60 -0
  131. dbt/include/global_project/macros/relations/table/drop.sql +14 -0
  132. dbt/include/global_project/macros/relations/table/rename.sql +10 -0
  133. dbt/include/global_project/macros/relations/table/replace.sql +10 -0
  134. dbt/include/global_project/macros/relations/view/create.sql +27 -0
  135. dbt/include/global_project/macros/relations/view/drop.sql +14 -0
  136. dbt/include/global_project/macros/relations/view/rename.sql +10 -0
  137. dbt/include/global_project/macros/relations/view/replace.sql +66 -0
  138. dbt/include/global_project/macros/unit_test_sql/get_fixture_sql.sql +107 -0
  139. dbt/include/global_project/macros/utils/any_value.sql +9 -0
  140. dbt/include/global_project/macros/utils/array_append.sql +8 -0
  141. dbt/include/global_project/macros/utils/array_concat.sql +7 -0
  142. dbt/include/global_project/macros/utils/array_construct.sql +12 -0
  143. dbt/include/global_project/macros/utils/bool_or.sql +9 -0
  144. dbt/include/global_project/macros/utils/cast.sql +7 -0
  145. dbt/include/global_project/macros/utils/cast_bool_to_text.sql +7 -0
  146. dbt/include/global_project/macros/utils/concat.sql +7 -0
  147. dbt/include/global_project/macros/utils/data_types.sql +129 -0
  148. dbt/include/global_project/macros/utils/date.sql +10 -0
  149. dbt/include/global_project/macros/utils/date_spine.sql +75 -0
  150. dbt/include/global_project/macros/utils/date_trunc.sql +7 -0
  151. dbt/include/global_project/macros/utils/dateadd.sql +14 -0
  152. dbt/include/global_project/macros/utils/datediff.sql +14 -0
  153. dbt/include/global_project/macros/utils/equals.sql +14 -0
  154. dbt/include/global_project/macros/utils/escape_single_quotes.sql +8 -0
  155. dbt/include/global_project/macros/utils/except.sql +9 -0
  156. dbt/include/global_project/macros/utils/generate_series.sql +53 -0
  157. dbt/include/global_project/macros/utils/hash.sql +7 -0
  158. dbt/include/global_project/macros/utils/intersect.sql +9 -0
  159. dbt/include/global_project/macros/utils/last_day.sql +15 -0
  160. dbt/include/global_project/macros/utils/length.sql +11 -0
  161. dbt/include/global_project/macros/utils/listagg.sql +30 -0
  162. dbt/include/global_project/macros/utils/literal.sql +7 -0
  163. dbt/include/global_project/macros/utils/position.sql +11 -0
  164. dbt/include/global_project/macros/utils/replace.sql +14 -0
  165. dbt/include/global_project/macros/utils/right.sql +12 -0
  166. dbt/include/global_project/macros/utils/safe_cast.sql +9 -0
  167. dbt/include/global_project/macros/utils/split_part.sql +26 -0
  168. dbt/include/global_project/tests/generic/builtin.sql +30 -0
  169. dbt/include/py.typed +0 -0
  170. dbt_adapters-1.22.2.dist-info/METADATA +124 -0
  171. dbt_adapters-1.22.2.dist-info/RECORD +173 -0
  172. dbt_adapters-1.22.2.dist-info/WHEEL +4 -0
  173. dbt_adapters-1.22.2.dist-info/licenses/LICENSE +201 -0
@@ -0,0 +1,113 @@
1
+ import abc
2
+ from typing import Any, Dict, Optional
3
+ from typing_extensions import Protocol
4
+
5
+ from dbt.adapters.contracts.relation import RelationConfig
6
+
7
+
8
class CatalogIntegrationConfig(Protocol):
    """
    Represents the user configuration required to describe a catalog integration

    This class serves as a blueprint for catalog integration configurations,
    providing details about the catalog type, name, and other optional
    properties necessary for integration. It is designed to be used with
    any implementation that requires a catalog configuration protocol,
    ensuring a standardized structure and attributes are in place.

    Attributes:
        name (str): the name of the catalog integration in the dbt project, e.g. "my_iceberg_operational_data"
            - a unique name for this catalog integration to be referenced in a model configuration
        catalog_type (str): the type of the catalog integration in the data platform, e.g. "iceberg_rest"
            - this is required for dbt to determine the correct method for parsing user configuration
            - usually a combination of the catalog and the way in which the data platform interacts with it
        catalog_name (Optional[str]): the name of the catalog integration in the data platform, e.g. "my_favorite_iceberg_catalog"
            - this is required for dbt to correctly reference catalogs by name from model configuration
            - expected to be unique within the data platform, but many dbt catalog integrations can share the same catalog name
        table_format (Optional[str]): the table format this catalog uses
            - this is commonly unique to each catalog type, and should only be required from the user for catalogs that support multiple formats
        external_volume (Optional[str]): external storage volume identifier
            - while this is a separate concept from catalogs, we feel it is more user-friendly to group it with the catalog configuration
            - it's possible to use a default external volume at the user, database, or account level, hence this is optional
            - a result of this grouping is that there can only be one external volume per catalog integration, but many catalogs can share the same volume
            - a user should create a new dbt catalog if they want to use a different external volume for a given catalog integration
        file_format (Optional[str]): the file format backing this catalog's tables
            - NOTE(review): declared on the protocol but previously undocumented; confirm intended semantics against adapter implementations
        adapter_properties (Dict[str, Any]):
            - additional, adapter-specific properties are nested here to avoid future collision when expanding the catalog integration protocol
    """

    name: str
    catalog_type: str
    catalog_name: Optional[str]
    table_format: Optional[str]
    external_volume: Optional[str]
    file_format: Optional[str]
    adapter_properties: Dict[str, Any]
45
+
46
+
47
class CatalogRelation(Protocol):
    """Structural type for the catalog-related fields a built relation carries.

    Returned by `CatalogIntegration.build_relation` implementations.
    """

    catalog_name: Optional[str]
    table_format: Optional[str]
    external_volume: Optional[str]
    file_format: Optional[str]
52
+
53
+
54
class CatalogIntegration(abc.ABC):
    """One platform's way of interacting with one kind of catalog.

    Subclasses in an adapter implement a specific catalog integration type.
    Class-level attributes describe what the integration *type* determines:

        catalog_type: identifier for this implementation, e.g. "iceberg_rest"
        table_format: the table format, when it is fixed by the catalog type
        file_format: default file format, when fixed by the catalog type
        allows_writes: whether dbt may treat this catalog as writable; used
            at parse time, hence a class attribute

    Instance attributes come from the user's `CatalogIntegrationConfig`:

        name: the unique dbt-project-level name models use to reference this
            integration, e.g. "my_iceberg_operational_data"
        catalog_name: the catalog's name on the data platform; many dbt
            integrations can share one platform catalog
        external_volume: optional external storage volume identifier; each
            integration holds at most one volume, though many integrations
            can share the same volume
    """

    catalog_type: str
    table_format: Optional[str] = None
    file_format: Optional[str] = None
    allows_writes: bool = False

    def __init__(self, config: CatalogIntegrationConfig) -> None:
        self.name: str = config.name
        self.catalog_name: Optional[str] = config.catalog_name
        self.external_volume: Optional[str] = config.external_volume
        self.file_format: Optional[str] = config.file_format
        # table_format is often fixed by the catalog type at the class level;
        # only honor the user's value when one was actually supplied
        if config.table_format is not None:
            self.table_format = config.table_format

    def build_relation(self, config: RelationConfig) -> CatalogRelation:
        """Build a relation configuration in the context of this integration.

        Placeholder: subclasses must override with platform-specific logic.

        Args:
            config: User-provided model configuration.

        Returns:
            A `CatalogRelation` constructed from the input configuration.

        Raises:
            NotImplementedError: when not implemented in a subclass.
        """
        raise NotImplementedError(
            f"`{self.__class__.__name__}.build_relation` must be implemented to use this feature"
        )
File without changes
@@ -0,0 +1,24 @@
1
+ from typing import Any, Dict
2
+
3
+ from dbt_common.clients.jinja import BaseMacroGenerator, get_environment
4
+
5
+
6
class QueryStringGenerator(BaseMacroGenerator):
    """Renders the query-comment macro from a raw template string.

    There is no manifest node backing this template, so it is compiled
    eagerly from the supplied string and the template cache is bypassed.
    """

    def __init__(self, template_str: str, context: Dict[str, Any]) -> None:
        super().__init__(context)
        self.template_str: str = template_str
        # compile once up front; self.context is populated by the base class
        self.template = get_environment().from_string(
            template_str,
            globals=self.context,
        )

    def get_name(self) -> str:
        return "query_comment_macro"

    def get_template(self):
        """Don't use the template cache, we don't have a node"""
        return self.template

    def __call__(self, connection_name: str, node) -> str:
        rendered = self.call_macro(connection_name, node)
        return str(rendered)
File without changes
@@ -0,0 +1,229 @@
1
+ import abc
2
+ from dataclasses import dataclass, field
3
+ import itertools
4
+ from typing import (
5
+ Any,
6
+ Callable,
7
+ ClassVar,
8
+ Dict,
9
+ Iterable,
10
+ List,
11
+ Optional,
12
+ Tuple,
13
+ )
14
+
15
+ from dbt_common.contracts.util import Replaceable
16
+ from dbt_common.dataclass_schema import (
17
+ ExtensibleDbtClassMixin,
18
+ StrEnum,
19
+ ValidatedStringMixin,
20
+ dbtClassMixin,
21
+ )
22
+
23
+ # TODO: this is a very bad dependency - shared global state
24
+ from dbt_common.events.contextvars import get_node_info
25
+ from dbt_common.events.functions import fire_event
26
+ from dbt_common.exceptions import DbtInternalError
27
+ from dbt_common.utils import md5
28
+ from mashumaro.jsonschema.annotations import Pattern
29
+ from typing_extensions import Protocol, Annotated
30
+
31
+ from dbt.adapters.events.types import NewConnectionOpening
32
+ from dbt.adapters.utils import translate_aliases
33
+
34
+
35
class Identifier(ValidatedStringMixin):
    """A string validated as a legal identifier: letter/underscore first,
    then letters, digits, or underscores."""

    # NOTE(review): the `+` quantifier requires at least two characters in
    # total, so a single-character name like "x" does not match — confirm
    # whether that is intended (the same pattern is repeated in Connection).
    ValidationRegex = r"^[A-Za-z_][A-Za-z0-9_]+$"
37
+
38
+
39
@dataclass
class AdapterResponse(dbtClassMixin):
    """Standardized per-statement result metadata returned by an adapter.

    Attributes:
        _message: status message; this is what `str(response)` returns.
        code: status code reported by the platform, if any.
        rows_affected: number of rows affected, if reported.
        query_id: platform-side identifier of the executed query, if any.
    """

    _message: str
    code: Optional[str] = None
    rows_affected: Optional[int] = None
    query_id: Optional[str] = None

    def __str__(self):
        return self._message
48
+
49
+
50
class ConnectionState(StrEnum):
    """Lifecycle states of a `Connection`."""

    INIT = "init"
    OPEN = "open"
    CLOSED = "closed"
    FAIL = "fail"
55
+
56
+
57
@dataclass(init=False)
class Connection(ExtensibleDbtClassMixin, Replaceable):
    """A named connection wrapping a driver-level handle.

    The handle may initially be a `LazyHandle`; reading the `handle`
    property resolves it, which opens the real connection on first access.
    """

    # Annotated is used by mashumaro for jsonschema generation
    type: Annotated[Identifier, Pattern(r"^[A-Za-z_][A-Za-z0-9_]+$")]
    name: Optional[str] = None
    state: ConnectionState = ConnectionState.INIT  # type: ignore
    transaction_open: bool = False
    # accessed only through the `handle`/`credentials` properties below
    _handle: Optional[Any] = None
    _credentials: Optional[Any] = None

    def __init__(
        self,
        type: Identifier,
        name: Optional[str],
        credentials: dbtClassMixin,
        state: ConnectionState = ConnectionState.INIT,  # type: ignore
        transaction_open: bool = False,
        handle: Optional[Any] = None,
    ) -> None:
        self.type = type
        self.name = name
        self.state = state
        self.credentials = credentials
        self.transaction_open = transaction_open
        self.handle = handle

    @property
    def credentials(self):
        return self._credentials

    @credentials.setter
    def credentials(self, value):
        self._credentials = value

    @property
    def handle(self):
        # A LazyHandle is resolved (opened) on first read; the opener is
        # expected to replace self._handle with the real handle.
        if isinstance(self._handle, LazyHandle):
            try:
                # this will actually change 'self._handle'.
                self._handle.resolve(self)
            except RecursionError as exc:
                # The opener read `connection.handle` again while resolving,
                # re-entering this property indefinitely.
                raise DbtInternalError(
                    "A connection's open() method attempted to read the handle value"
                ) from exc
        return self._handle

    @handle.setter
    def handle(self, value):
        self._handle = value
106
+
107
+
108
class LazyHandle:
    """Defers opening a connection until its handle is first read.

    The opener must be a callable that takes a Connection object and opens
    the connection, updating the handle on the Connection.
    """

    def __init__(self, opener: Callable[[Connection], Connection]) -> None:
        self.opener = opener

    def resolve(self, connection: Connection) -> Connection:
        # Announce the open before delegating to the opener, which swaps
        # this LazyHandle for the real handle as a side effect.
        event = NewConnectionOpening(
            connection_state=connection.state,
            node_info=get_node_info(),
        )
        fire_event(event)
        return self.opener(connection)
121
+
122
+
123
# see https://github.com/python/mypy/issues/4717#issuecomment-373932080
# and https://github.com/python/mypy/issues/5374
# for why we have type: ignore. Maybe someday dataclasses + abstract classes
# will work.
@dataclass
class Credentials(ExtensibleDbtClassMixin, Replaceable, metaclass=abc.ABCMeta):
    """Base class for adapter connection credentials.

    Concrete adapters add their own fields and implement `type` and
    `_connection_keys`. `_ALIASES` maps alternate user-facing spellings
    (e.g. "dbname" -> "database") for (de)serialization.
    """

    database: str
    schema: str
    _ALIASES: ClassVar[Dict[str, str]] = field(default={}, init=False)

    # `@property` + `@abc.abstractmethod` replaces the long-deprecated
    # `@abc.abstractproperty`; subclass behavior is unchanged.
    @property
    @abc.abstractmethod
    def type(self) -> str:
        """The adapter type name for these credentials."""
        raise NotImplementedError("type not implemented for base credentials class")

    @property
    def unique_field(self) -> str:
        """Hashed and included in anonymous telemetry to track adapter adoption.
        Return the field from Credentials that can uniquely identify
        one team/organization using this adapter
        """
        raise NotImplementedError("unique_field not implemented for base credentials class")

    def hashed_unique_field(self) -> str:
        # hash so the raw identifying value is never reported directly
        return md5(self.unique_field)

    def connection_info(self, *, with_aliases: bool = False) -> Iterable[Tuple[str, Any]]:
        """Return an ordered iterator of key/value pairs for pretty-printing."""
        as_dict = self.to_dict(omit_none=False)
        # compute the ordered key tuple once; previously `_connection_keys()`
        # was invoked twice per call
        ordered_keys = self._connection_keys()
        connection_keys = set(ordered_keys)
        aliases: List[str] = []
        if with_aliases:
            aliases = [k for k, v in self._ALIASES.items() if v in connection_keys]
        for key in itertools.chain(ordered_keys, aliases):
            if key in as_dict:
                yield key, as_dict[key]

    @abc.abstractmethod
    def _connection_keys(self) -> Tuple[str, ...]:
        """Ordered keys to surface in `connection_info`."""
        raise NotImplementedError

    @classmethod
    def __pre_deserialize__(cls, data):
        data = super().__pre_deserialize__(data)
        # Need to fixup dbname => database, pass => password
        data = cls.translate_aliases(data)
        return data

    @classmethod
    def translate_aliases(cls, kwargs: Dict[str, Any], recurse: bool = False) -> Dict[str, Any]:
        """Rewrite aliased keys in `kwargs` to their canonical names."""
        return translate_aliases(kwargs, cls._ALIASES, recurse)

    def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None):
        # no super() -- do we need it?
        # Mirror canonical values back under their aliased names so consumers
        # expecting the alias spelling still find them.
        if self._ALIASES:
            dct.update(
                {
                    new_name: dct[canonical_name]
                    for new_name, canonical_name in self._ALIASES.items()
                    if canonical_name in dct
                }
            )
        return dct
185
+
186
+
187
class HasCredentials(Protocol):
    """Structural type for config objects that carry connection credentials."""

    credentials: Credentials
    profile_name: str
    target_name: str
    threads: int

    def to_target_dict(self):
        raise NotImplementedError("to_target_dict not implemented")
195
+
196
+
197
# Default Jinja template for the comment dbt attaches to queries: a JSON
# object with app/version/profile/target metadata, plus either the node id
# (when running in a node context) or the connection name.
DEFAULT_QUERY_COMMENT = """
{%- set comment_dict = {} -%}
{%- do comment_dict.update(
    app='dbt',
    dbt_version=dbt_version,
    profile_name=target.get('profile_name'),
    target_name=target.get('target_name'),
) -%}
{%- if node is not none -%}
  {%- do comment_dict.update(
    node_id=node.unique_id,
  ) -%}
{% else %}
  {# in the node context, the connection name is the node_id #}
  {%- do comment_dict.update(connection_name=connection_name) -%}
{%- endif -%}
{{ return(tojson(comment_dict)) }}
"""
215
+
216
+
217
@dataclass
class QueryComment(dbtClassMixin):
    """User configuration controlling the query comment.

    Attributes:
        comment: Jinja template rendered into the comment.
        append: whether to append the comment rather than prepend it
            (None means unset — TODO confirm the consumer's default).
        job_label: serialized under the alias "job-label".
    """

    comment: str = DEFAULT_QUERY_COMMENT
    append: Optional[bool] = None
    job_label: bool = field(default=False, metadata={"alias": "job-label"})
222
+
223
+
224
class AdapterRequiredConfig(HasCredentials, Protocol):
    """Structural type for the runtime configuration an adapter requires."""

    project_name: str
    query_comment: QueryComment
    cli_vars: Dict[str, Any]
    target_path: str
    log_cache_events: bool
@@ -0,0 +1,11 @@
1
+ from typing import Optional
2
+
3
+ from dbt_common.clients.jinja import MacroProtocol
4
+ from typing_extensions import Protocol
5
+
6
+
7
+ class MacroResolverProtocol(Protocol):
8
+ def find_macro_by_name(
9
+ self, name: str, root_project_name: str, package: Optional[str]
10
+ ) -> Optional[MacroProtocol]:
11
+ raise NotImplementedError("find_macro_by_name not implemented")
@@ -0,0 +1,160 @@
1
+ from abc import ABC
2
+
3
+ from collections.abc import Mapping
4
+ from dataclasses import dataclass
5
+ from typing import Dict, Optional, Any, Union, List
6
+
7
+
8
+ from dbt_common.contracts.config.materialization import OnConfigurationChangeOption
9
+ from dbt_common.contracts.util import Replaceable
10
+ from dbt_common.dataclass_schema import StrEnum, dbtClassMixin
11
+ from dbt_common.exceptions import CompilationError, DataclassNotDictError
12
+ from dbt_common.utils import deep_merge
13
+ from typing_extensions import Protocol
14
+
15
+
16
class RelationType(StrEnum):
    """The kinds of database relations dbt distinguishes."""

    Table = "table"
    View = "view"
    CTE = "cte"
    MaterializedView = "materialized_view"
    Ephemeral = "ephemeral"
    # this is a "catch all" that is better than `None` == external to anything dbt is aware of
    External = "external"
    PointerTable = "pointer_table"
    Function = "function"
26
+
27
+
28
class MaterializationContract(Protocol):
    """Structural type for a model's contract settings."""

    enforced: bool
    alias_types: bool
31
+
32
+
33
class MaterializationConfig(Mapping, ABC):
    """Abstract, mapping-like view of a model's materialization config.

    Concrete implementations expose these attributes and supply the Mapping
    behavior so templates can treat the config like a dict.
    """

    materialized: str
    incremental_strategy: Optional[str]
    persist_docs: Dict[str, Any]
    column_types: Dict[str, Any]
    full_refresh: Optional[bool]
    quoting: Dict[str, Any]
    unique_key: Union[str, List[str], None]
    on_schema_change: Optional[str]
    on_configuration_change: OnConfigurationChangeOption
    contract: MaterializationContract
    # presumably holds settings not modeled above — confirm against callers
    extra: Dict[str, Any]

    # Mapping protocol stubs; concrete classes provide the implementations.
    def __contains__(self, item): ...

    def __delitem__(self, key): ...
49
+
50
+
51
class RelationConfig(Protocol):
    """Structural type for the node fields needed to build a relation."""

    resource_type: str
    name: str
    description: str
    database: str
    schema: str
    identifier: str
    compiled_code: Optional[str]
    meta: Dict[str, Any]
    tags: List[str]
    quoting_dict: Dict[str, bool]
    config: Optional[MaterializationConfig]
63
+
64
+
65
class ComponentName(StrEnum):
    """The three components that name a relation path."""

    Database = "database"
    Schema = "schema"
    Identifier = "identifier"
69
+
70
+
71
class HasQuoting(Protocol):
    """Structural type for objects carrying per-component quoting flags."""

    quoting: Dict[str, bool]
73
+
74
+
75
class FakeAPIObject(dbtClassMixin, Replaceable, Mapping):
    """A dataclass that satisfies just enough of the Mapping protocol to be
    indexed by attribute name, while refusing iteration and length.
    """

    # override the mapping truthiness, len is always >1
    def __bool__(self):
        return True

    def __getitem__(self, key):
        # translate attribute access into mapping access
        if not hasattr(self, key):
            raise KeyError(key)
        return getattr(self, key)

    def __iter__(self):
        raise DataclassNotDictError(self)

    def __len__(self):
        raise DataclassNotDictError(self)

    def incorporate(self, **kwargs):
        """Return a copy with `kwargs` deep-merged over this object's fields."""
        merged = deep_merge(self.to_dict(omit_none=True), kwargs)
        return self.from_dict(merged)
96
+
97
+
98
@dataclass
class Policy(FakeAPIObject):
    """Per-component boolean policy (e.g. quoting/inclusion) for a relation
    path's database, schema, and identifier."""

    database: bool = True
    schema: bool = True
    identifier: bool = True

    def get_part(self, key: ComponentName) -> bool:
        """Return the flag for the given component; raise ValueError otherwise."""
        lookup = {
            ComponentName.Database: self.database,
            ComponentName.Schema: self.schema,
            ComponentName.Identifier: self.identifier,
        }
        if key not in lookup:
            raise ValueError(
                "Got a key of {}, expected one of {}".format(key, list(ComponentName))
            )
        return lookup[key]

    def replace_dict(self, dct: Dict[ComponentName, bool]):
        """Return a copy with the given components' flags replaced."""
        return self.replace(**{str(k): v for k, v in dct.items()})
121
+
122
+
123
@dataclass
class Path(FakeAPIObject):
    """The (database, schema, identifier) components naming a relation."""

    database: Optional[str] = None
    schema: Optional[str] = None
    identifier: Optional[str] = None

    def __post_init__(self):
        # jinja2.Undefined can sneak in here and break rendering, so require
        # each component to be a real string or None.
        for label, value in (
            ("database", self.database),
            ("schema", self.schema),
            ("identifier", self.identifier),
        ):
            if value is not None and not isinstance(value, str):
                raise CompilationError("Got an invalid path {}: {}".format(label, value))

    def get_lowered_part(self, key: ComponentName) -> Optional[str]:
        """Return the component lowercased, or None if it is unset."""
        part = self.get_part(key)
        return part.lower() if part is not None else None

    def get_part(self, key: ComponentName) -> Optional[str]:
        """Return the component for the given key; raise ValueError otherwise."""
        lookup = {
            ComponentName.Database: self.database,
            ComponentName.Schema: self.schema,
            ComponentName.Identifier: self.identifier,
        }
        if key not in lookup:
            raise ValueError(
                "Got a key of {}, expected one of {}".format(key, list(ComponentName))
            )
        return lookup[key]

    def replace_dict(self, dct: Dict[ComponentName, str]):
        """Return a copy with the given components replaced."""
        return self.replace(**{str(k): v for k, v in dct.items()})
@@ -0,0 +1,51 @@
1
+ # Events Module
2
+ The Events module is responsible for communicating internal dbt structures into a consumable interface. Because the "event" classes are based entirely on protobuf definitions, the interface is really clearly defined, whether or not protobufs are used to consume it. We use Betterproto for compiling the protobuf message definitions into Python classes.
3
+
4
+ # Using the Events Module
5
+ The event module provides types that represent what is happening in dbt in `events.types`. These types are intended to represent an exhaustive list of all things happening within dbt that will need to be logged, streamed, or printed. To fire an event, `common.events.functions::fire_event` is the entry point to the module from everywhere in dbt.
6
+
7
+ # Logging
8
+ When events are processed via `fire_event`, nearly everything is logged. Whether or not the user has enabled the debug flag, all debug messages are still logged to the file. However, some events are particularly time consuming to construct because they return a huge amount of data. Today, the only messages in this category are cache events and are only logged if the `--log-cache-events` flag is on. This is important because these messages should not be created unless they are going to be logged, because they cause a noticeable performance degradation. These events use the "fire_event_if" functions.
9
+
10
+ # Adding a New Event
11
+ All protos have been moved into the central protos repository. To edit an event proto, edit https://github.com/dbt-labs/proto-python-public or open an issue on that repository.
12
+
13
+ ## Required for Every Event
14
+
15
+ - a method `code`, that's unique across events
16
+ - assign a log level by using the Level mixin: `DebugLevel`, `InfoLevel`, `WarnLevel`, or `ErrorLevel`
17
+ - a message()
18
+
19
+ Example
20
+ ```
21
+ class PartialParsingDeletedExposure(DebugLevel):
22
+ def code(self):
23
+ return "I049"
24
+
25
+ def message(self) -> str:
26
+ return f"Partial parsing: deleted exposure {self.unique_id}"
27
+
28
+ ```
29
+
30
+
31
+ # Adapter Maintainers
32
+ To integrate existing log messages from adapters, you likely have a line of code like this in your adapter already:
33
+ ```python
34
+ from dbt.logger import GLOBAL_LOGGER as logger
35
+ ```
36
+
37
+ Simply change it to these two lines with your adapter's database name, and all your existing call sites will now use the new system for v1.0:
38
+
39
+ ```python
40
+
41
+ from dbt.adapters.events.logging import AdapterLogger
42
+
43
+ logger = AdapterLogger("<database name>")
44
+ # e.g. AdapterLogger("Snowflake")
45
+ ```
46
+
47
+ ## Compiling types.proto
48
+
49
+ After adding a new message in `adapter_types.proto`, either:
50
+ - In the repository root directory: `make adapter_proto_types`
51
+ - In the `core/dbt/adapters/events` directory: `protoc -I=. --python_out=. types.proto`
File without changes
@@ -0,0 +1,2 @@
1
+ # preserving import path during dbtlabs.proto refactor
2
+ from dbtlabs.proto.public.v1.fields.adapter_types_pb2 import * # noqa
@@ -0,0 +1,36 @@
1
+ from dbt_common.events.base_types import BaseEvent
2
+ from dbt_common.events.base_types import DebugLevel as CommonDebugLevel
3
+ from dbt_common.events.base_types import DynamicLevel as CommonDynamicLevel
4
+ from dbt_common.events.base_types import ErrorLevel as CommonErrorLevel
5
+ from dbt_common.events.base_types import InfoLevel as CommonInfoLevel
6
+ from dbt_common.events.base_types import TestLevel as CommonTestLevel
7
+ from dbt_common.events.base_types import WarnLevel as CommonWarnLevel
8
+ from dbt.adapters.events import adapter_types_pb2
9
+
10
+
11
class AdapterBaseEvent(BaseEvent):
    """Base class for adapter events.

    Points proto-type lookup at the adapter-specific protobuf module.
    """

    PROTO_TYPES_MODULE = adapter_types_pb2
13
+
14
+
15
# Adapter-flavored versions of the shared event levels: each mixes the common
# level behavior with AdapterBaseEvent so adapter protobuf types are used.


class DynamicLevel(CommonDynamicLevel, AdapterBaseEvent):
    pass


class TestLevel(CommonTestLevel, AdapterBaseEvent):
    pass


class DebugLevel(CommonDebugLevel, AdapterBaseEvent):
    pass


class InfoLevel(CommonInfoLevel, AdapterBaseEvent):
    pass


class WarnLevel(CommonWarnLevel, AdapterBaseEvent):
    pass


class ErrorLevel(CommonErrorLevel, AdapterBaseEvent):
    pass