dbt-adapters 1.11.0__py3-none-any.whl → 1.12.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dbt-adapters might be problematic. Click here for more details.

dbt/adapters/__about__.py CHANGED
@@ -1 +1 @@
1
- version = "1.11.0"
1
+ version = "1.12.0"
dbt/adapters/base/impl.py CHANGED
@@ -98,6 +98,13 @@ GET_CATALOG_MACRO_NAME = "get_catalog"
98
98
  GET_CATALOG_RELATIONS_MACRO_NAME = "get_catalog_relations"
99
99
  FRESHNESS_MACRO_NAME = "collect_freshness"
100
100
  GET_RELATION_LAST_MODIFIED_MACRO_NAME = "get_relation_last_modified"
101
+ DEFAULT_BASE_BEHAVIOR_FLAGS = [
102
+ {
103
+ "name": "require_batched_execution_for_custom_microbatch_strategy",
104
+ "default": False,
105
+ "docs_url": "https://docs.getdbt.com/docs/build/incremental-microbatch",
106
+ }
107
+ ]
101
108
 
102
109
 
103
110
  class ConstraintSupport(str, Enum):
@@ -199,6 +206,14 @@ class FreshnessResponse(TypedDict):
199
206
  age: float # age in seconds
200
207
 
201
208
 
209
+ class SnapshotStrategy(TypedDict):
210
+ unique_key: Optional[str]
211
+ updated_at: Optional[str]
212
+ row_changed: Optional[str]
213
+ scd_id: Optional[str]
214
+ hard_deletes: Optional[str]
215
+
216
+
202
217
  class BaseAdapter(metaclass=AdapterMeta):
203
218
  """The BaseAdapter provides an abstract base class for adapters.
204
219
 
@@ -273,8 +288,7 @@ class BaseAdapter(metaclass=AdapterMeta):
273
288
  self.connections = self.ConnectionManager(config, mp_context)
274
289
  self._macro_resolver: Optional[MacroResolverProtocol] = None
275
290
  self._macro_context_generator: Optional[MacroContextGeneratorCallable] = None
276
- # this will be updated to include global behavior flags once they exist
277
- self.behavior = [] # type: ignore
291
+ self.behavior = DEFAULT_BASE_BEHAVIOR_FLAGS # type: ignore
278
292
 
279
293
  ###
280
294
  # Methods to set / access a macro resolver
@@ -314,14 +328,10 @@ class BaseAdapter(metaclass=AdapterMeta):
314
328
  def _behavior_flags(self) -> List[BehaviorFlag]:
315
329
  """
316
330
  This method should be overwritten by adapter maintainers to provide platform-specific flags
331
+
332
+ The BaseAdapter should NOT include any global flags here as those should be defined via DEFAULT_BASE_BEHAVIOR_FLAGS
317
333
  """
318
- return [
319
- {
320
- "name": "require_batched_execution_for_custom_microbatch_strategy",
321
- "default": False,
322
- "docs_url": "https://docs.getdbt.com/docs/build/incremental-microbatch",
323
- }
324
- ]
334
+ return []
325
335
 
326
336
  ###
327
337
  # Methods that pass through to the connection manager
@@ -793,8 +803,8 @@ class BaseAdapter(metaclass=AdapterMeta):
793
803
  columns = self.get_columns_in_relation(relation)
794
804
  names = set(c.name.lower() for c in columns)
795
805
  missing = []
796
- # Note: we're not checking dbt_updated_at here because it's not
797
- # always present.
806
+ # Note: we're not checking dbt_updated_at or dbt_is_deleted here because they
807
+ # aren't always present.
798
808
  for column in ("dbt_scd_id", "dbt_valid_from", "dbt_valid_to"):
799
809
  desired = column_names[column] if column_names else column
800
810
  if desired not in names:
@@ -803,6 +813,28 @@ class BaseAdapter(metaclass=AdapterMeta):
803
813
  if missing:
804
814
  raise SnapshotTargetNotSnapshotTableError(missing)
805
815
 
816
+ @available.parse_none
817
+ def assert_valid_snapshot_target_given_strategy(
818
+ self, relation: BaseRelation, column_names: Dict[str, str], strategy: SnapshotStrategy
819
+ ) -> None:
820
+ # Assert everything we can with the legacy function.
821
+ self.valid_snapshot_target(relation, column_names)
822
+
823
+ # Now do strategy-specific checks.
824
+ # TODO: Make these checks more comprehensive.
825
+ if strategy.get("hard_deletes", None) == "new_record":
826
+ columns = self.get_columns_in_relation(relation)
827
+ names = set(c.name.lower() for c in columns)
828
+ missing = []
829
+
830
+ for column in ("dbt_is_deleted",):
831
+ desired = column_names[column] if column_names else column
832
+ if desired not in names:
833
+ missing.append(desired)
834
+
835
+ if missing:
836
+ raise SnapshotTargetNotSnapshotTableError(missing)
837
+
806
838
  @available.parse_none
807
839
  def expand_target_column_types(
808
840
  self, from_relation: BaseRelation, to_relation: BaseRelation
@@ -1578,8 +1610,14 @@ class BaseAdapter(metaclass=AdapterMeta):
1578
1610
  return ["append"]
1579
1611
 
1580
1612
  def builtin_incremental_strategies(self):
1613
+ """
1614
+ List of possible builtin strategies for adapters
1615
+
1616
+ Microbatch is added by _default_. It is only not added when the behavior flag
1617
+ `require_batched_execution_for_custom_microbatch_strategy` is True.
1618
+ """
1581
1619
  builtin_strategies = ["append", "delete+insert", "merge", "insert_overwrite"]
1582
- if self.behavior.require_batched_execution_for_custom_microbatch_strategy.no_warn:
1620
+ if not self.behavior.require_batched_execution_for_custom_microbatch_strategy.no_warn:
1583
1621
  builtin_strategies.append("microbatch")
1584
1622
 
1585
1623
  return builtin_strategies
@@ -1787,6 +1825,29 @@ class BaseAdapter(metaclass=AdapterMeta):
1787
1825
  """
1788
1826
  return {}
1789
1827
 
1828
+ @available.parse_none
1829
+ @classmethod
1830
+ def get_hard_deletes_behavior(cls, config):
1831
+ """Check the hard_deletes config enum, and the legacy invalidate_hard_deletes
1832
+ config flag in order to determine which behavior should be used for deleted
1833
+ records in a snapshot. The default is to ignore them."""
1834
+ invalidate_hard_deletes = config.get("invalidate_hard_deletes", None)
1835
+ hard_deletes = config.get("hard_deletes", None)
1836
+
1837
+ if invalidate_hard_deletes is not None and hard_deletes is not None:
1838
+ raise DbtValidationError(
1839
+ "You cannot set both the invalidate_hard_deletes and hard_deletes config properties on the same snapshot."
1840
+ )
1841
+
1842
+ if invalidate_hard_deletes or hard_deletes == "invalidate":
1843
+ return "invalidate"
1844
+ elif hard_deletes == "new_record":
1845
+ return "new_record"
1846
+ elif hard_deletes is None or hard_deletes == "ignore":
1847
+ return "ignore"
1848
+
1849
+ raise DbtValidationError("Invalid setting for property hard_deletes.")
1850
+
1790
1851
 
1791
1852
  COLUMNS_EQUAL_SQL = """
1792
1853
  with diff_count as (
@@ -21,6 +21,9 @@ class Capability(str, Enum):
21
21
  """Indicates support for getting catalog information including table-level and column-level metadata for a single
22
22
  relation."""
23
23
 
24
+ MicrobatchConcurrency = "MicrobatchConcurrency"
25
+ """Indicates support for running the microbatch incremental materialization strategy concurrently across threads."""
26
+
24
27
 
25
28
  class Support(str, Enum):
26
29
  Unknown = "Unknown"
@@ -1,11 +1,22 @@
1
1
  # -*- coding: utf-8 -*-
2
2
  # Generated by the protocol buffer compiler. DO NOT EDIT!
3
+ # NO CHECKED-IN PROTOBUF GENCODE
3
4
  # source: adapter_types.proto
5
+ # Protobuf Python Version: 5.28.3
4
6
  """Generated protocol buffer code."""
5
7
  from google.protobuf import descriptor as _descriptor
6
8
  from google.protobuf import descriptor_pool as _descriptor_pool
9
+ from google.protobuf import runtime_version as _runtime_version
7
10
  from google.protobuf import symbol_database as _symbol_database
8
11
  from google.protobuf.internal import builder as _builder
12
+ _runtime_version.ValidateProtobufRuntimeVersion(
13
+ _runtime_version.Domain.PUBLIC,
14
+ 5,
15
+ 28,
16
+ 3,
17
+ '',
18
+ 'adapter_types.proto'
19
+ )
9
20
  # @@protoc_insertion_point(imports)
10
21
 
11
22
  _sym_db = _symbol_database.Default()
@@ -20,13 +31,12 @@ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13\x61\x64\x61pt
20
31
  _globals = globals()
21
32
  _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
22
33
  _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'adapter_types_pb2', _globals)
23
- if _descriptor._USE_C_DESCRIPTORS == False:
24
-
25
- DESCRIPTOR._options = None
26
- _ADAPTERCOMMONEVENTINFO_EXTRAENTRY._options = None
27
- _ADAPTERCOMMONEVENTINFO_EXTRAENTRY._serialized_options = b'8\001'
28
- _CACHEDUMPGRAPH_DUMPENTRY._options = None
29
- _CACHEDUMPGRAPH_DUMPENTRY._serialized_options = b'8\001'
34
+ if not _descriptor._USE_C_DESCRIPTORS:
35
+ DESCRIPTOR._loaded_options = None
36
+ _globals['_ADAPTERCOMMONEVENTINFO_EXTRAENTRY']._loaded_options = None
37
+ _globals['_ADAPTERCOMMONEVENTINFO_EXTRAENTRY']._serialized_options = b'8\001'
38
+ _globals['_CACHEDUMPGRAPH_DUMPENTRY']._loaded_options = None
39
+ _globals['_CACHEDUMPGRAPH_DUMPENTRY']._serialized_options = b'8\001'
30
40
  _globals['_ADAPTERCOMMONEVENTINFO']._serialized_start=100
31
41
  _globals['_ADAPTERCOMMONEVENTINFO']._serialized_end=399
32
42
  _globals['_ADAPTERCOMMONEVENTINFO_EXTRAENTRY']._serialized_start=355
@@ -1,6 +1,16 @@
1
1
  import abc
2
2
  import time
3
- from typing import Any, Dict, Iterable, Iterator, List, Optional, Tuple, TYPE_CHECKING
3
+ from typing import (
4
+ Any,
5
+ Dict,
6
+ Iterable,
7
+ Iterator,
8
+ List,
9
+ Optional,
10
+ Tuple,
11
+ TYPE_CHECKING,
12
+ Type,
13
+ )
4
14
 
5
15
  from dbt_common.events.contextvars import get_node_info
6
16
  from dbt_common.events.functions import fire_event
@@ -18,6 +28,7 @@ from dbt.adapters.events.types import (
18
28
  SQLCommit,
19
29
  SQLQuery,
20
30
  SQLQueryStatus,
31
+ AdapterEventDebug,
21
32
  )
22
33
 
23
34
  if TYPE_CHECKING:
@@ -61,7 +72,50 @@ class SQLConnectionManager(BaseConnectionManager):
61
72
  auto_begin: bool = True,
62
73
  bindings: Optional[Any] = None,
63
74
  abridge_sql_log: bool = False,
75
+ retryable_exceptions: Tuple[Type[Exception], ...] = tuple(),
76
+ retry_limit: int = 1,
64
77
  ) -> Tuple[Connection, Any]:
78
+ """
79
+ Retry function encapsulated here to avoid commitment to some
80
+ user-facing interface. Right now, Redshift commits to a 1 second
81
+ retry timeout so this serves as a default.
82
+ """
83
+
84
+ def _execute_query_with_retry(
85
+ cursor: Any,
86
+ sql: str,
87
+ bindings: Optional[Any],
88
+ retryable_exceptions: Tuple[Type[Exception], ...],
89
+ retry_limit: int,
90
+ attempt: int,
91
+ ):
92
+ """
93
+ A success sees the try exit cleanly and avoid any recursive
94
+ retries. Failure begins a sleep and retry routine.
95
+ """
96
+ try:
97
+ cursor.execute(sql, bindings)
98
+ except retryable_exceptions as e:
99
+ # Cease retries and fail when limit is hit.
100
+ if attempt >= retry_limit:
101
+ raise e
102
+
103
+ fire_event(
104
+ AdapterEventDebug(
105
+ message=f"Got a retryable error {type(e)}. {retry_limit-attempt} retries left. Retrying in 1 second.\nError:\n{e}"
106
+ )
107
+ )
108
+ time.sleep(1)
109
+
110
+ return _execute_query_with_retry(
111
+ cursor=cursor,
112
+ sql=sql,
113
+ bindings=bindings,
114
+ retryable_exceptions=retryable_exceptions,
115
+ retry_limit=retry_limit,
116
+ attempt=attempt + 1,
117
+ )
118
+
65
119
  connection = self.get_thread_connection()
66
120
  if auto_begin and connection.transaction_open is False:
67
121
  self.begin()
@@ -90,7 +144,14 @@ class SQLConnectionManager(BaseConnectionManager):
90
144
  pre = time.perf_counter()
91
145
 
92
146
  cursor = connection.handle.cursor()
93
- cursor.execute(sql, bindings)
147
+ _execute_query_with_retry(
148
+ cursor=cursor,
149
+ sql=sql,
150
+ bindings=bindings,
151
+ retryable_exceptions=retryable_exceptions,
152
+ retry_limit=retry_limit,
153
+ attempt=1,
154
+ )
94
155
 
95
156
  result = self.get_response(cursor)
96
157
 
@@ -7,6 +7,11 @@
7
7
  {% endmacro %}
8
8
 
9
9
  {% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}
10
+ {#-- This ensures microbatch batches get unique temp relations to avoid clobbering --#}
11
+ {% if suffix == '__dbt_tmp' and model.batch %}
12
+ {% set suffix = suffix ~ '_' ~ model.batch.id %}
13
+ {% endif %}
14
+
10
15
  {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix)) }}
11
16
  {% endmacro %}
12
17
 
@@ -35,7 +35,7 @@
35
35
  {% endmacro %}
36
36
 
37
37
  {% macro get_snapshot_table_column_names() %}
38
- {{ return({'dbt_valid_to': 'dbt_valid_to', 'dbt_valid_from': 'dbt_valid_from', 'dbt_scd_id': 'dbt_scd_id', 'dbt_updated_at': 'dbt_updated_at'}) }}
38
+ {{ return({'dbt_valid_to': 'dbt_valid_to', 'dbt_valid_from': 'dbt_valid_from', 'dbt_scd_id': 'dbt_scd_id', 'dbt_updated_at': 'dbt_updated_at', 'dbt_is_deleted': 'dbt_is_deleted'}) }}
39
39
  {% endmacro %}
40
40
 
41
41
  {% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%}
@@ -82,7 +82,7 @@
82
82
  from snapshot_query
83
83
  ),
84
84
 
85
- {%- if strategy.invalidate_hard_deletes %}
85
+ {%- if strategy.hard_deletes == 'invalidate' or strategy.hard_deletes == 'new_record' %}
86
86
 
87
87
  deletes_source_data as (
88
88
 
@@ -96,6 +96,9 @@
96
96
  select
97
97
  'insert' as dbt_change_type,
98
98
  source_data.*
99
+ {%- if strategy.hard_deletes == 'new_record' -%}
100
+ ,'False' as {{ columns.dbt_is_deleted }}
101
+ {%- endif %}
99
102
 
100
103
  from insertions_source_data as source_data
101
104
  left outer join snapshotted_data
@@ -113,6 +116,9 @@
113
116
  'update' as dbt_change_type,
114
117
  source_data.*,
115
118
  snapshotted_data.{{ columns.dbt_scd_id }}
119
+ {%- if strategy.hard_deletes == 'new_record' -%}
120
+ , snapshotted_data.{{ columns.dbt_is_deleted }}
121
+ {%- endif %}
116
122
 
117
123
  from updates_source_data as source_data
118
124
  join snapshotted_data
@@ -122,9 +128,8 @@
122
128
  )
123
129
  )
124
130
 
125
- {%- if strategy.invalidate_hard_deletes -%}
131
+ {%- if strategy.hard_deletes == 'invalidate' or strategy.hard_deletes == 'new_record' %}
126
132
  ,
127
-
128
133
  deletes as (
129
134
 
130
135
  select
@@ -134,7 +139,38 @@
134
139
  {{ snapshot_get_time() }} as {{ columns.dbt_updated_at }},
135
140
  {{ snapshot_get_time() }} as {{ columns.dbt_valid_to }},
136
141
  snapshotted_data.{{ columns.dbt_scd_id }}
142
+ {%- if strategy.hard_deletes == 'new_record' -%}
143
+ , snapshotted_data.{{ columns.dbt_is_deleted }}
144
+ {%- endif %}
145
+ from snapshotted_data
146
+ left join deletes_source_data as source_data
147
+ on {{ unique_key_join_on(strategy.unique_key, "snapshotted_data", "source_data") }}
148
+ where {{ unique_key_is_null(strategy.unique_key, "source_data") }}
149
+ )
150
+ {%- endif %}
151
+
152
+ {%- if strategy.hard_deletes == 'new_record' %}
153
+ {% set source_sql_cols = get_column_schema_from_query(source_sql) %}
154
+ ,
155
+ deletion_records as (
137
156
 
157
+ select
158
+ 'insert' as dbt_change_type,
159
+ {%- for col in source_sql_cols -%}
160
+ snapshotted_data.{{ adapter.quote(col.column) }},
161
+ {% endfor -%}
162
+ {%- if strategy.unique_key | is_list -%}
163
+ {%- for key in strategy.unique_key -%}
164
+ snapshotted_data.{{ key }} as dbt_unique_key_{{ loop.index }},
165
+ {% endfor -%}
166
+ {%- else -%}
167
+ snapshotted_data.dbt_unique_key as dbt_unique_key,
168
+ {% endif -%}
169
+ {{ snapshot_get_time() }} as {{ columns.dbt_valid_from }},
170
+ {{ snapshot_get_time() }} as {{ columns.dbt_updated_at }},
171
+ snapshotted_data.{{ columns.dbt_valid_to }} as {{ columns.dbt_valid_to }},
172
+ snapshotted_data.{{ columns.dbt_scd_id }},
173
+ 'True' as {{ columns.dbt_is_deleted }}
138
174
  from snapshotted_data
139
175
  left join deletes_source_data as source_data
140
176
  on {{ unique_key_join_on(strategy.unique_key, "snapshotted_data", "source_data") }}
@@ -145,10 +181,15 @@
145
181
  select * from insertions
146
182
  union all
147
183
  select * from updates
148
- {%- if strategy.invalidate_hard_deletes %}
184
+ {%- if strategy.hard_deletes == 'invalidate' or strategy.hard_deletes == 'new_record' %}
149
185
  union all
150
186
  select * from deletes
151
187
  {%- endif %}
188
+ {%- if strategy.hard_deletes == 'new_record' %}
189
+ union all
190
+ select * from deletion_records
191
+ {%- endif %}
192
+
152
193
 
153
194
  {%- endmacro %}
154
195
 
@@ -165,6 +206,9 @@
165
206
  {{ strategy.updated_at }} as {{ columns.dbt_updated_at }},
166
207
  {{ strategy.updated_at }} as {{ columns.dbt_valid_from }},
167
208
  {{ get_dbt_valid_to_current(strategy, columns) }}
209
+ {%- if strategy.hard_deletes == 'new_record' -%}
210
+ , 'False' as {{ columns.dbt_is_deleted }}
211
+ {% endif -%}
168
212
  from (
169
213
  {{ sql }}
170
214
  ) sbq
@@ -37,7 +37,7 @@
37
37
 
38
38
  {% set columns = config.get("snapshot_table_column_names") or get_snapshot_table_column_names() %}
39
39
 
40
- {{ adapter.valid_snapshot_target(target_relation, columns) }}
40
+ {{ adapter.assert_valid_snapshot_target_given_strategy(target_relation, columns, strategy) }}
41
41
 
42
42
  {% set build_or_select_sql = snapshot_staging_table(strategy, sql, target_relation) %}
43
43
  {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}
@@ -54,7 +54,8 @@
54
54
  {# The model_config parameter is no longer used, but is passed in anyway for compatibility. #}
55
55
  {% set primary_key = config.get('unique_key') %}
56
56
  {% set updated_at = config.get('updated_at') %}
57
- {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes') or false %}
57
+ {% set hard_deletes = adapter.get_hard_deletes_behavior(config) %}
58
+ {% set invalidate_hard_deletes = hard_deletes == 'invalidate' %}
58
59
  {% set columns = config.get("snapshot_table_column_names") or get_snapshot_table_column_names() %}
59
60
 
60
61
  {#/*
@@ -78,7 +79,8 @@
78
79
  "updated_at": updated_at,
79
80
  "row_changed": row_changed_expr,
80
81
  "scd_id": scd_id_expr,
81
- "invalidate_hard_deletes": invalidate_hard_deletes
82
+ "invalidate_hard_deletes": invalidate_hard_deletes,
83
+ "hard_deletes": hard_deletes
82
84
  }) %}
83
85
  {% endmacro %}
84
86
 
@@ -141,7 +143,8 @@
141
143
  {# The model_config parameter is no longer used, but is passed in anyway for compatibility. #}
142
144
  {% set check_cols_config = config.get('check_cols') %}
143
145
  {% set primary_key = config.get('unique_key') %}
144
- {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes') or false %}
146
+ {% set hard_deletes = adapter.get_hard_deletes_behavior(config) %}
147
+ {% set invalidate_hard_deletes = hard_deletes == 'invalidate' %}
145
148
  {% set updated_at = config.get('updated_at') or snapshot_get_time() %}
146
149
 
147
150
  {% set column_added = false %}
@@ -175,6 +178,7 @@
175
178
  "updated_at": updated_at,
176
179
  "row_changed": row_changed_expr,
177
180
  "scd_id": scd_id_expr,
178
- "invalidate_hard_deletes": invalidate_hard_deletes
181
+ "invalidate_hard_deletes": invalidate_hard_deletes,
182
+ "hard_deletes": hard_deletes
179
183
  }) %}
180
184
  {% endmacro %}
@@ -1,6 +1,6 @@
1
- Metadata-Version: 2.3
1
+ Metadata-Version: 2.4
2
2
  Name: dbt-adapters
3
- Version: 1.11.0
3
+ Version: 1.12.0
4
4
  Summary: The set of adapter protocols and base functionality that supports integration with dbt-core
5
5
  Project-URL: Homepage, https://github.com/dbt-labs/dbt-adapters
6
6
  Project-URL: Documentation, https://docs.getdbt.com
@@ -9,6 +9,7 @@ Project-URL: Issues, https://github.com/dbt-labs/dbt-adapters/issues
9
9
  Project-URL: Changelog, https://github.com/dbt-labs/dbt-adapters/blob/main/CHANGELOG.md
10
10
  Author-email: dbt Labs <info@dbtlabs.com>
11
11
  Maintainer-email: dbt Labs <info@dbtlabs.com>
12
+ License-File: LICENSE
12
13
  Keywords: adapter,adapters,database,dbt,dbt Cloud,dbt Core,dbt Labs,dbt-core,elt
13
14
  Classifier: Development Status :: 5 - Production/Stable
14
15
  Classifier: License :: OSI Approved :: Apache Software License
@@ -21,9 +22,9 @@ Classifier: Programming Language :: Python :: 3.11
21
22
  Classifier: Programming Language :: Python :: 3.12
22
23
  Requires-Python: >=3.9.0
23
24
  Requires-Dist: agate<2.0,>=1.0
24
- Requires-Dist: dbt-common<2.0,>=1.11
25
- Requires-Dist: mashumaro[msgpack]<4.0,>=3.0
26
- Requires-Dist: protobuf<5.0,>=3.0
25
+ Requires-Dist: dbt-common<2.0,>=1.13
26
+ Requires-Dist: mashumaro[msgpack]<3.15,>=3.9
27
+ Requires-Dist: protobuf<6.0,>=5.0
27
28
  Requires-Dist: pytz>=2015.7
28
29
  Requires-Dist: typing-extensions<5.0,>=4.0
29
30
  Description-Content-Type: text/markdown
@@ -32,7 +33,7 @@ Description-Content-Type: text/markdown
32
33
  <img src="https://raw.githubusercontent.com/dbt-labs/dbt/ec7dee39f793aa4f7dd3dae37282cc87664813e4/etc/dbt-logo-full.svg" alt="dbt logo" width="500"/>
33
34
  </p>
34
35
 
35
- # dbt-tests-adapter
36
+ # dbt-adapters
36
37
 
37
38
  This package is responsible for:
38
39
 
@@ -40,9 +41,11 @@ This package is responsible for:
40
41
  - caching information from databases
41
42
  - determining how relations are defined
42
43
 
43
- There are two major adapter types: base and sql
44
+ In this repo there is also our testing suite used for testing adapter functionality
45
+
46
+ # Adapters
44
47
 
45
- # Directories
48
+ There are two major adapter types: base and sql
46
49
 
47
50
  ## `base`
48
51
 
@@ -1,8 +1,8 @@
1
1
  dbt/__init__.py,sha256=iY4jdvOxcDhkdr5FiyOTZPHadKtMZDQ-qC6Fw6_EHPM,277
2
- dbt/adapters/__about__.py,sha256=mD8RxZIPreXVMDcN4OLaJBmakjmOUITJu4JM34eJwD8,19
2
+ dbt/adapters/__about__.py,sha256=321j5SZZ1oxJ8inLWCUMR7BK6FyNniIooozPekshf70,19
3
3
  dbt/adapters/__init__.py,sha256=3noHsg-64qI0_Pw6OR9F7l1vU2_qrJvinq8POTtuaZM,252
4
4
  dbt/adapters/cache.py,sha256=WGy4ewnz-J13LverTACBW2iFhGswrWLgm-wiBrQnMzo,20084
5
- dbt/adapters/capability.py,sha256=-Mbej2AL_bjQatHpFWUgsQ8z0zwnotyE9Y5DYHnX7NE,2364
5
+ dbt/adapters/capability.py,sha256=M3FkC9veKnNB7a7uQyl7EHX_AGNXPChbHAkcY4cgXCY,2534
6
6
  dbt/adapters/factory.py,sha256=9N-LjTnyqBKqK7KARjJdAPdQIRXQbVRfd2cBNDtU4Dc,9378
7
7
  dbt/adapters/protocol.py,sha256=qRsEFAKjUMVnoBspAiCUTICez1ckson-dFS04dTXSco,3818
8
8
  dbt/adapters/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -12,7 +12,7 @@ dbt/adapters/base/README.md,sha256=muHQntC07Lh6L1XfVgwKhV5RltOPBLYPdQqd8_7l34c,5
12
12
  dbt/adapters/base/__init__.py,sha256=Nc8lQVkOzAqdcxk4cw4E_raxN9CAWMwhQx4STdiicxg,456
13
13
  dbt/adapters/base/column.py,sha256=Uj20UixoxCn2rlv4QDNONyys6CDkDFyG3anCXKf0T2c,5350
14
14
  dbt/adapters/base/connections.py,sha256=-C5dOwGgMKH8n_v6wjwOxV7chBdS0GjOGwNQCUbhhWc,16951
15
- dbt/adapters/base/impl.py,sha256=IFu1rQkT-eLjosQ2wJOykWp4iBgppEH4smd3ASA8v5w,72501
15
+ dbt/adapters/base/impl.py,sha256=jUcI0rIL_HCOQ-vRTvu-Cg_hsR5zYLqMhZXvp8V5N2c,74986
16
16
  dbt/adapters/base/meta.py,sha256=IKqviGf7gK_qGtrn0t8NaSdUaw8g_M8SjICacMvNwGY,5702
17
17
  dbt/adapters/base/plugin.py,sha256=rm0GjNHnWM2mn0GJOjciZLwn-02xlzWCoMT9u-epwP0,1076
18
18
  dbt/adapters/base/query_headers.py,sha256=UluGd9IYCYkoMiDi5Yx_lnrCOSjWppjwRro4SIGgx8I,3496
@@ -26,7 +26,7 @@ dbt/adapters/contracts/relation.py,sha256=H_IYxRtg9LV8kYAfAiWeQAf-2ByMRN-EkfxHim
26
26
  dbt/adapters/events/README.md,sha256=kVUFIsDQrHTUmk9Mmu-yXYkWh4pA5MJK_H6739rQr5I,3521
27
27
  dbt/adapters/events/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
28
28
  dbt/adapters/events/adapter_types.proto,sha256=Cjs_XEZFGlKDj_dGbtjSUcAuXm6jRBRMnZEXHCQd5-I,9673
29
- dbt/adapters/events/adapter_types_pb2.py,sha256=HCk8PoldHIsAC5zLfcRr0gmIeNey7N9e8s-5NZ24CJk,26471
29
+ dbt/adapters/events/adapter_types_pb2.py,sha256=CvkiYSbT6WKOKJgzgDBBbWta-LE4TChnNn26I9r2cbw,26808
30
30
  dbt/adapters/events/base_types.py,sha256=sTlNRl15GaRIrIDVxalf7sK08dfo3Ol1Ua2jbFO7-7c,966
31
31
  dbt/adapters/events/logging.py,sha256=1nRFswQubgUrVHL5DB9ewBtbEv1-OcIXC7mMmu3NOaM,2350
32
32
  dbt/adapters/events/types.py,sha256=nW7_FgrEmWlM-HWPHrYcJ5K5QLZtfspLizyqlXrJaoE,12189
@@ -51,7 +51,7 @@ dbt/adapters/relation_configs/config_base.py,sha256=IK9oKf9TuOTLIiKX8ms_X-p4yxZv
51
51
  dbt/adapters/relation_configs/config_change.py,sha256=hf6fDWbZpKvZdM6z-OtY-GveipzfLRR_dsUZmYmXkdk,713
52
52
  dbt/adapters/relation_configs/config_validation.py,sha256=wlJUMwOEPhYFch-LRtEWfLNJMq8jL1tRhOUHmNX8nFw,1978
53
53
  dbt/adapters/sql/__init__.py,sha256=WLWZJfqc8pr1N1BMVe9gM-KQ4URJIeKfLqTuJBD1VN0,107
54
- dbt/adapters/sql/connections.py,sha256=qWsDFdenuzPDzhawPktrrHOf-5GKeMwXi4iWoQJVu4A,6547
54
+ dbt/adapters/sql/connections.py,sha256=XDmzgQ6uoTIdQNJrEsdjmaOuAZYTvaMzH03ZJueKMy8,8422
55
55
  dbt/adapters/sql/impl.py,sha256=HmH3eC-qVeCAAukjEOnUNZbH-UK32X-NL4kwb_EHzs0,10763
56
56
  dbt/include/__init__.py,sha256=qEFeq3yuf3lQKVseALmL8aPM8fpCS54B_5pry00M3hk,76
57
57
  dbt/include/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -64,7 +64,7 @@ dbt/include/global_project/macros/adapters/freshness.sql,sha256=FKi-xsBCOYjGYp10
64
64
  dbt/include/global_project/macros/adapters/indexes.sql,sha256=DasPn32Cm0OZyjBPBWzL4BpK9PZ3xF_Pu8Nh4NgASaw,1366
65
65
  dbt/include/global_project/macros/adapters/metadata.sql,sha256=meNIc3z4lXdh1lDb-K1utKb8VzAVuN23E6XWgMZGDhQ,3512
66
66
  dbt/include/global_project/macros/adapters/persist_docs.sql,sha256=TUazJHSaMIDlELqALLRMC2kYj5DGZ9U-6K8RbgwRXw4,1369
67
- dbt/include/global_project/macros/adapters/relation.sql,sha256=18lE088vj0JZW8af2byoGLFhxYxekyXKTrX7Dj-B-E0,2809
67
+ dbt/include/global_project/macros/adapters/relation.sql,sha256=yT4YVCipxeQAjkj98rscCNt-slC8oIJXriHlkvzbkQ0,3016
68
68
  dbt/include/global_project/macros/adapters/schema.sql,sha256=XElo0cfvdEipI5hpulLXLBEXP_YnilG-1kRwDMqDD5I,594
69
69
  dbt/include/global_project/macros/adapters/show.sql,sha256=mFDQZxvvDzafTeh9v90ttks-VCjUUxbrw_YA02MV1Jk,785
70
70
  dbt/include/global_project/macros/adapters/timestamps.sql,sha256=FvPwWkmM00r9rs2DjR5wrI-U9ah3-8VMIwk5wRRzuPw,1910
@@ -94,10 +94,10 @@ dbt/include/global_project/macros/materializations/models/incremental/on_schema_
94
94
  dbt/include/global_project/macros/materializations/models/incremental/strategies.sql,sha256=ORGWiYfj-b3_VIps9FDlyx-Q4A2hZzX2aYLocW8b6pU,2613
95
95
  dbt/include/global_project/macros/materializations/seeds/helpers.sql,sha256=Y15ej-D3gm1ExIOMNT208q43gRk8d985WQBuGSooNL0,3920
96
96
  dbt/include/global_project/macros/materializations/seeds/seed.sql,sha256=YSoGzVO3iIUiOKIUM9G7yApGLFH4O9bv_d4KjHo3p4Q,2155
97
- dbt/include/global_project/macros/materializations/snapshots/helpers.sql,sha256=h7VuSOq7gYwh2G2sh16AjP-57SsDG1M_ail22RNHAOk,8550
98
- dbt/include/global_project/macros/materializations/snapshots/snapshot.sql,sha256=zxFMzo9MHii__ciSOGSoUT3dtBmSqHjtkIc9xah8axw,4011
97
+ dbt/include/global_project/macros/materializations/snapshots/helpers.sql,sha256=_NVJngRLB7N7E__FGH81UkPhyMK204lTsXMnlzaE-2g,10679
98
+ dbt/include/global_project/macros/materializations/snapshots/snapshot.sql,sha256=clIZtCE7vvOXxzz1t2KlmPZM7AuSGsK7MInspo0N5Qg,4043
99
99
  dbt/include/global_project/macros/materializations/snapshots/snapshot_merge.sql,sha256=-uCvd2_E4AfWWEBRyQLiGCpuHgOG-MczlbLIWyGfAzM,1287
100
- dbt/include/global_project/macros/materializations/snapshots/strategies.sql,sha256=5NB6nHW6qi-IcSf-ALwokTKHfOfV5Hzz4tGotgrV7JA,6746
100
+ dbt/include/global_project/macros/materializations/snapshots/strategies.sql,sha256=AfIsRiw0YnQym5wUiWR2JpiEEky4_WBTpTtE0HJvpZw,6928
101
101
  dbt/include/global_project/macros/materializations/tests/helpers.sql,sha256=rxUxDZm4EvrDbi0H_ePghE34_QLmxGEY2o_LTMc9CU0,1731
102
102
  dbt/include/global_project/macros/materializations/tests/test.sql,sha256=Rz3O_3dWHlIofG3d2CwsP2bXFimRZUIwOevyB0iz1J4,1831
103
103
  dbt/include/global_project/macros/materializations/tests/unit.sql,sha256=KonePuFfwcz5uJ-JW0CrEy8_q-Gl45fonngGmFvQcNU,1252
@@ -157,7 +157,7 @@ dbt/include/global_project/macros/utils/right.sql,sha256=EwNG98CAFIwNDmarwopf7Rk
157
157
  dbt/include/global_project/macros/utils/safe_cast.sql,sha256=1mswwkDACmIi1I99JKb_-vq3kjMe4HhMRV70mW8Bt4Y,298
158
158
  dbt/include/global_project/macros/utils/split_part.sql,sha256=fXEIS0oIiYR7-4lYbb0QbZdG-q2TpV63AFd1ky4I5UM,714
159
159
  dbt/include/global_project/tests/generic/builtin.sql,sha256=p94xdyPwb2TlxgLBqCfrcRfJ1QNgsjPvBm8f0Q5eqZM,1022
160
- dbt_adapters-1.11.0.dist-info/METADATA,sha256=KDzBUhhSZK_DBdJWz5CEDcZFLHmTXmHC5K679nlBx48,2477
161
- dbt_adapters-1.11.0.dist-info/WHEEL,sha256=3U_NnUcV_1B1kPkYaPzN-irRckL5VW_lytn0ytO_kRY,87
162
- dbt_adapters-1.11.0.dist-info/licenses/LICENSE,sha256=9yjigiJhWcCZvQjdagGKDwrRph58QWc5P2bVSQwXo6s,11344
163
- dbt_adapters-1.11.0.dist-info/RECORD,,
160
+ dbt_adapters-1.12.0.dist-info/METADATA,sha256=AHsPu7P_IJ4P5nf3z-RcufHB6_KCz99B2fTbK8AUkKQ,2576
161
+ dbt_adapters-1.12.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
162
+ dbt_adapters-1.12.0.dist-info/licenses/LICENSE,sha256=9yjigiJhWcCZvQjdagGKDwrRph58QWc5P2bVSQwXo6s,11344
163
+ dbt_adapters-1.12.0.dist-info/RECORD,,
@@ -1,4 +1,4 @@
1
1
  Wheel-Version: 1.0
2
- Generator: hatchling 1.26.1
2
+ Generator: hatchling 1.27.0
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any