dbt-adapters 0.1.0a5__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of dbt-adapters might be problematic.

dbt/adapters/__about__.py CHANGED
@@ -1 +1 @@
- version = "0.1.0a5"
+ version = "1.0.0"
dbt/adapters/base/connections.py CHANGED
@@ -18,9 +18,9 @@ from typing import (
  Tuple,
  Type,
  Union,
+ TYPE_CHECKING,
  )

- import agate
  from dbt_common.events.contextvars import get_node_info
  from dbt_common.events.functions import fire_event
  from dbt_common.exceptions import DbtInternalError, NotImplementedError
@@ -48,6 +48,9 @@ from dbt.adapters.events.types import (
  )
  from dbt.adapters.exceptions import FailedToConnectError, InvalidConnectionError

+ if TYPE_CHECKING:
+ import agate
+

  SleepTime = Union[int, float] # As taken by time.sleep.
  AdapterHandle = Any # Adapter connection handle objects can be any class.
@@ -162,9 +165,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
  conn.handle = LazyHandle(self.open)
  # Add the connection to thread_connections for this thread
  self.set_thread_connection(conn)
- fire_event(
- NewConnection(conn_name=conn_name, conn_type=self.TYPE, node_info=get_node_info())
- )
+ fire_event(NewConnection(conn_name=conn_name, conn_type=self.TYPE, node_info=get_node_info()))
  else: # existing connection either wasn't open or didn't have the right name
  if conn.state != "open":
  conn.handle = LazyHandle(self.open)
@@ -396,7 +397,7 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
  auto_begin: bool = False,
  fetch: bool = False,
  limit: Optional[int] = None,
- ) -> Tuple[AdapterResponse, agate.Table]:
+ ) -> Tuple[AdapterResponse, "agate.Table"]:
  """Execute the given SQL.

  :param str sql: The sql to execute.
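
Across these files the pattern is the same: the module-level `import agate` moves behind `typing.TYPE_CHECKING`, and annotations that mention agate become string literals so they are never evaluated at runtime. A minimal, self-contained sketch of that pattern (the function and its body are illustrative, not code from the package):

from typing import TYPE_CHECKING, Tuple

if TYPE_CHECKING:
    import agate  # seen only by type checkers; never imported at runtime


def fetch_empty_result() -> Tuple[str, "agate.Table"]:
    # the quoted annotation keeps this module importable without agate;
    # the real import happens lazily, only when a table is actually built
    import agate

    return "OK", agate.Table(rows=[])

This keeps importing the adapter modules cheap while preserving full type coverage for tools like mypy.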
dbt/adapters/base/impl.py CHANGED
@@ -20,16 +20,9 @@ from typing import (
  Type,
  TypedDict,
  Union,
+ TYPE_CHECKING,
  )

- import agate
- from dbt_common.clients.agate_helper import (
- Integer,
- empty_table,
- get_column_value_uncased,
- merge_tables,
- table_from_rows,
- )
  from dbt_common.clients.jinja import CallableMacroGenerator
  from dbt_common.contracts.constraints import (
  ColumnLevelConstraint,
@@ -94,6 +87,9 @@ from dbt.adapters.exceptions import (
  )
  from dbt.adapters.protocol import AdapterConfig, MacroContextGeneratorCallable

+ if TYPE_CHECKING:
+ import agate
+

  GET_CATALOG_MACRO_NAME = "get_catalog"
  GET_CATALOG_RELATIONS_MACRO_NAME = "get_catalog_relations"
@@ -107,7 +103,14 @@ class ConstraintSupport(str, Enum):
  NOT_SUPPORTED = "not_supported"


- def _expect_row_value(key: str, row: agate.Row):
+ def _parse_callback_empty_table(*args, **kwargs) -> Tuple[str, "agate.Table"]:
+ # Lazy load agate_helper to avoid importing agate when it is not necessary.
+ from dbt_common.clients.agate_helper import empty_table
+
+ return "", empty_table()
+
+
+ def _expect_row_value(key: str, row: "agate.Row"):
  if key not in row.keys():
  raise DbtInternalError(
  'Got a row without "{}" column, columns: {}'.format(key, row.keys())
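
`_parse_callback_empty_table` replaces the module-level lambda previously passed to `@available.parse(...)`, so `dbt_common.clients.agate_helper` (and with it agate) is imported only when the parse-time placeholder is actually requested. A rough stand-alone sketch of the callback idea (the name `_empty_result` is hypothetical):

from typing import TYPE_CHECKING, Any, Tuple

if TYPE_CHECKING:
    import agate


def _empty_result(*args: Any, **kwargs: Any) -> Tuple[str, "agate.Table"]:
    # the heavy import lives inside the function body, so merely importing
    # the module that defines this callback does not pull in agate
    from dbt_common.clients.agate_helper import empty_table

    return "", empty_table()

At parse time dbt returns this placeholder instead of running the query; the agate import is deferred until the callback (or a real query result) is actually built.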
@@ -117,13 +120,13 @@ def _expect_row_value(key: str, row: agate.Row):

  def _catalog_filter_schemas(
  used_schemas: FrozenSet[Tuple[str, str]]
- ) -> Callable[[agate.Row], bool]:
+ ) -> Callable[["agate.Row"], bool]:
  """Return a function that takes a row and decides if the row should be
  included in the catalog output.
  """
  schemas = frozenset((d.lower(), s.lower()) for d, s in used_schemas)

- def test(row: agate.Row) -> bool:
+ def test(row: "agate.Row") -> bool:
  table_database = _expect_row_value("table_database", row)
  table_schema = _expect_row_value("table_schema", row)
  # the schema may be present but None, which is not an error and should
@@ -254,6 +257,8 @@ class BaseAdapter(metaclass=AdapterMeta):
  ConstraintType.foreign_key: ConstraintSupport.ENFORCED,
  }

+ MAX_SCHEMA_METADATA_RELATIONS = 100
+
  # This static member variable can be overriden in concrete adapter
  # implementations to indicate adapter support for optional capabilities.
  _capabilities = CapabilityDict({})
@@ -323,14 +328,14 @@ class BaseAdapter(metaclass=AdapterMeta):
  if self.connections.query_header is not None:
  self.connections.query_header.reset()

- @available.parse(lambda *a, **k: ("", empty_table()))
+ @available.parse(_parse_callback_empty_table)
  def execute(
  self,
  sql: str,
  auto_begin: bool = False,
  fetch: bool = False,
  limit: Optional[int] = None,
- ) -> Tuple[AdapterResponse, agate.Table]:
+ ) -> Tuple[AdapterResponse, "agate.Table"]:
  """Execute the given SQL. This is a thin wrapper around
  ConnectionManager.execute.

@@ -340,7 +345,7 @@ class BaseAdapter(metaclass=AdapterMeta):
  :param bool fetch: If set, fetch results.
  :param Optional[int] limit: If set, only fetch n number of rows
  :return: A tuple of the query status and results (empty if fetch=False).
- :rtype: Tuple[AdapterResponse, agate.Table]
+ :rtype: Tuple[AdapterResponse, "agate.Table"]
  """
  return self.connections.execute(sql=sql, auto_begin=auto_begin, fetch=fetch, limit=limit)

@@ -368,8 +373,8 @@ class BaseAdapter(metaclass=AdapterMeta):
  ]
  return columns

- @available.parse(lambda *a, **k: ("", empty_table()))
- def get_partitions_metadata(self, table: str) -> Tuple[agate.Table]:
+ @available.parse(_parse_callback_empty_table)
+ def get_partitions_metadata(self, table: str) -> Tuple["agate.Table"]:
  """
  TODO: Can we move this to dbt-bigquery?
  Obtain partitions metadata for a BigQuery partitioned table.
@@ -377,7 +382,7 @@ class BaseAdapter(metaclass=AdapterMeta):
  :param str table: a partitioned table id, in standard SQL format.
  :return: a partition metadata tuple, as described in
  https://cloud.google.com/bigquery/docs/creating-partitioned-tables#getting_partition_metadata_using_meta_tables.
- :rtype: agate.Table
+ :rtype: "agate.Table"
  """
  if hasattr(self.connections, "get_partitions_metadata"):
  return self.connections.get_partitions_metadata(table=table)
@@ -421,7 +426,9 @@ class BaseAdapter(metaclass=AdapterMeta):
  populate.
  """
  return {
- self.Relation.create_from(quoting=self.config, relation_config=relation_config)
+ self.Relation.create_from(
+ quoting=self.config, relation_config=relation_config
+ ).without_identifier()
  for relation_config in relation_configs
  }

@@ -663,7 +670,7 @@ class BaseAdapter(metaclass=AdapterMeta):
  # Methods about grants
  ###
  @available
- def standardize_grants_dict(self, grants_table: agate.Table) -> dict:
+ def standardize_grants_dict(self, grants_table: "agate.Table") -> dict:
  """Translate the result of `show grants` (or equivalent) to match the
  grants which a user would configure in their project.

@@ -938,7 +945,7 @@ class BaseAdapter(metaclass=AdapterMeta):
  ###
  @classmethod
  @abc.abstractmethod
- def convert_text_type(cls, agate_table: agate.Table, col_idx: int) -> str:
+ def convert_text_type(cls, agate_table: "agate.Table", col_idx: int) -> str:
  """Return the type in the database that best maps to the agate.Text
  type for the given agate table and column index.

@@ -950,7 +957,7 @@ class BaseAdapter(metaclass=AdapterMeta):

  @classmethod
  @abc.abstractmethod
- def convert_number_type(cls, agate_table: agate.Table, col_idx: int) -> str:
+ def convert_number_type(cls, agate_table: "agate.Table", col_idx: int) -> str:
  """Return the type in the database that best maps to the agate.Number
  type for the given agate table and column index.

@@ -961,7 +968,7 @@ class BaseAdapter(metaclass=AdapterMeta):
  raise NotImplementedError("`convert_number_type` is not implemented for this adapter!")

  @classmethod
- def convert_integer_type(cls, agate_table: agate.Table, col_idx: int) -> str:
+ def convert_integer_type(cls, agate_table: "agate.Table", col_idx: int) -> str:
  """Return the type in the database that best maps to the agate.Number
  type for the given agate table and column index.

@@ -973,7 +980,7 @@ class BaseAdapter(metaclass=AdapterMeta):

  @classmethod
  @abc.abstractmethod
- def convert_boolean_type(cls, agate_table: agate.Table, col_idx: int) -> str:
+ def convert_boolean_type(cls, agate_table: "agate.Table", col_idx: int) -> str:
  """Return the type in the database that best maps to the agate.Boolean
  type for the given agate table and column index.

@@ -985,7 +992,7 @@ class BaseAdapter(metaclass=AdapterMeta):

  @classmethod
  @abc.abstractmethod
- def convert_datetime_type(cls, agate_table: agate.Table, col_idx: int) -> str:
+ def convert_datetime_type(cls, agate_table: "agate.Table", col_idx: int) -> str:
  """Return the type in the database that best maps to the agate.DateTime
  type for the given agate table and column index.

@@ -997,7 +1004,7 @@ class BaseAdapter(metaclass=AdapterMeta):

  @classmethod
  @abc.abstractmethod
- def convert_date_type(cls, agate_table: agate.Table, col_idx: int) -> str:
+ def convert_date_type(cls, agate_table: "agate.Table", col_idx: int) -> str:
  """Return the type in the database that best maps to the agate.Date
  type for the given agate table and column index.

@@ -1009,7 +1016,7 @@ class BaseAdapter(metaclass=AdapterMeta):

  @classmethod
  @abc.abstractmethod
- def convert_time_type(cls, agate_table: agate.Table, col_idx: int) -> str:
+ def convert_time_type(cls, agate_table: "agate.Table", col_idx: int) -> str:
  """Return the type in the database that best maps to the
  agate.TimeDelta type for the given agate table and column index.

@@ -1021,11 +1028,14 @@ class BaseAdapter(metaclass=AdapterMeta):

  @available
  @classmethod
- def convert_type(cls, agate_table: agate.Table, col_idx: int) -> Optional[str]:
+ def convert_type(cls, agate_table: "agate.Table", col_idx: int) -> Optional[str]:
  return cls.convert_agate_type(agate_table, col_idx)

  @classmethod
- def convert_agate_type(cls, agate_table: agate.Table, col_idx: int) -> Optional[str]:
+ def convert_agate_type(cls, agate_table: "agate.Table", col_idx: int) -> Optional[str]:
+ import agate
+ from dbt_common.clients.agate_helper import Integer
+
  agate_type: Type = agate_table.column_types[col_idx]
  conversions: List[Tuple[Type, Callable[..., str]]] = [
  (Integer, cls.convert_integer_type),
@@ -1102,11 +1112,13 @@ class BaseAdapter(metaclass=AdapterMeta):

  @classmethod
  def _catalog_filter_table(
- cls, table: agate.Table, used_schemas: FrozenSet[Tuple[str, str]]
- ) -> agate.Table:
+ cls, table: "agate.Table", used_schemas: FrozenSet[Tuple[str, str]]
+ ) -> "agate.Table":
  """Filter the table as appropriate for catalog entries. Subclasses can
  override this to change filtering rules on a per-adapter basis.
  """
+ from dbt_common.clients.agate_helper import table_from_rows
+
  # force database + schema to be strings
  table = table_from_rows(
  table.rows,
@@ -1120,7 +1132,7 @@ class BaseAdapter(metaclass=AdapterMeta):
  information_schema: InformationSchema,
  schemas: Set[str],
  used_schemas: FrozenSet[Tuple[str, str]],
- ) -> agate.Table:
+ ) -> "agate.Table":
  kwargs = {"information_schema": information_schema, "schemas": schemas}
  table = self.execute_macro(GET_CATALOG_MACRO_NAME, kwargs=kwargs)

@@ -1132,7 +1144,7 @@ class BaseAdapter(metaclass=AdapterMeta):
  information_schema: InformationSchema,
  relations: List[BaseRelation],
  used_schemas: FrozenSet[Tuple[str, str]],
- ) -> agate.Table:
+ ) -> "agate.Table":
  kwargs = {
  "information_schema": information_schema,
  "relations": relations,
@@ -1148,10 +1160,10 @@ class BaseAdapter(metaclass=AdapterMeta):
  used_schemas: FrozenSet[Tuple[str, str]],
  relations: Optional[Set[BaseRelation]] = None,
  ):
- catalogs: agate.Table
+ catalogs: "agate.Table"
  if (
  relations is None
- or len(relations) > 100
+ or len(relations) > self.MAX_SCHEMA_METADATA_RELATIONS
  or not self.supports(Capability.SchemaMetadataByRelations)
  ):
  # Do it the traditional way. We get the full catalog.
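
The hard-coded limit of 100 relations becomes the class attribute `MAX_SCHEMA_METADATA_RELATIONS`, so concrete adapters can override how many relations may be fetched through the by-relation catalog path before dbt falls back to the full-schema catalog. A condensed, hypothetical sketch of that decision (not the actual method):

from typing import Optional, Set

MAX_SCHEMA_METADATA_RELATIONS = 100  # mirrors the new class attribute


def use_full_catalog(relations: Optional[Set[str]], supports_by_relation: bool) -> bool:
    # fall back to the traditional full-schema catalog query when there is
    # no relation list, the list is too large, or the adapter does not
    # support filtering schema metadata by relation
    return (
        relations is None
        or len(relations) > MAX_SCHEMA_METADATA_RELATIONS
        or not supports_by_relation
    )


print(use_full_catalog(None, True))        # True: no relation list at all
print(use_full_catalog({"a", "b"}, True))  # False: small list and capability present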
@@ -1171,7 +1183,7 @@ class BaseAdapter(metaclass=AdapterMeta):
  for r in relations
  }

- def in_map(row: agate.Row):
+ def in_map(row: "agate.Row"):
  d = _expect_row_value("table_database", row)
  s = _expect_row_value("table_schema", row)
  i = _expect_row_value("table_name", row)
@@ -1184,16 +1196,16 @@ class BaseAdapter(metaclass=AdapterMeta):

  return catalogs, exceptions

- def row_matches_relation(self, row: agate.Row, relations: Set[BaseRelation]):
+ def row_matches_relation(self, row: "agate.Row", relations: Set[BaseRelation]):
  pass

  def get_catalog(
  self,
  relation_configs: Iterable[RelationConfig],
  used_schemas: FrozenSet[Tuple[str, str]],
- ) -> Tuple[agate.Table, List[Exception]]:
+ ) -> Tuple["agate.Table", List[Exception]]:
  with executor(self.config) as tpe:
- futures: List[Future[agate.Table]] = []
+ futures: List[Future["agate.Table"]] = []
  schema_map: SchemaSearchMap = self._get_catalog_schemas(relation_configs)
  for info, schemas in schema_map.items():
  if len(schemas) == 0:
@@ -1209,9 +1221,9 @@ class BaseAdapter(metaclass=AdapterMeta):

  def get_catalog_by_relations(
  self, used_schemas: FrozenSet[Tuple[str, str]], relations: Set[BaseRelation]
- ) -> Tuple[agate.Table, List[Exception]]:
+ ) -> Tuple["agate.Table", List[Exception]]:
  with executor(self.config) as tpe:
- futures: List[Future[agate.Table]] = []
+ futures: List[Future["agate.Table"]] = []
  relations_by_schema = self._get_catalog_relations_by_info_schema(relations)
  for info_schema in relations_by_schema:
  name = ".".join([str(info_schema.database), "information_schema"])
@@ -1241,6 +1253,8 @@ class BaseAdapter(metaclass=AdapterMeta):
  macro_resolver: Optional[MacroResolverProtocol] = None,
  ) -> Tuple[Optional[AdapterResponse], FreshnessResponse]:
  """Calculate the freshness of sources in dbt, and return it"""
+ import agate
+
  kwargs: Dict[str, Any] = {
  "source": source,
  "loaded_at_field": loaded_at_field,
@@ -1251,8 +1265,8 @@ class BaseAdapter(metaclass=AdapterMeta):
  # in older versions of dbt-core, the 'collect_freshness' macro returned the table of results directly
  # starting in v1.5, by default, we return both the table and the adapter response (metadata about the query)
  result: Union[
- AttrDict, # current: contains AdapterResponse + agate.Table
- agate.Table, # previous: just table
+ AttrDict, # current: contains AdapterResponse + "agate.Table"
+ "agate.Table", # previous: just table
  ]
  result = self.execute_macro(
  FRESHNESS_MACRO_NAME, kwargs=kwargs, macro_resolver=macro_resolver
@@ -1300,6 +1314,8 @@ class BaseAdapter(metaclass=AdapterMeta):
  adapter_response, table = result.response, result.table # type: ignore[attr-defined]

  try:
+ from dbt_common.clients.agate_helper import get_column_value_uncased
+
  row = table[0]
  last_modified_val = get_column_value_uncased("last_modified", row)
  snapshotted_at_val = get_column_value_uncased("snapshotted_at", row)
@@ -1535,6 +1551,9 @@ class BaseAdapter(metaclass=AdapterMeta):
  parsed_constraint: Union[ColumnLevelConstraint, ModelLevelConstraint],
  render_func,
  ) -> Optional[str]:
+ # skip checking enforcement if this is a 'custom' constraint
+ if parsed_constraint.type == ConstraintType.custom:
+ return render_func(parsed_constraint)
  if (
  parsed_constraint.warn_unsupported
  and cls.CONSTRAINT_SUPPORT[parsed_constraint.type] == ConstraintSupport.NOT_SUPPORTED
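
The three added lines make 'custom' constraints bypass the enforcement check: they carry user-supplied SQL, have no entry in `CONSTRAINT_SUPPORT`, and are now rendered as-is instead of hitting the unsupported-constraint lookup. A self-contained illustration of the control flow, with trimmed stand-ins for the dbt types (nothing here is the package's actual API):

from enum import Enum
from typing import Callable, Optional


class ConstraintType(str, Enum):
    # trimmed stand-in for dbt_common's ConstraintType
    not_null = "not_null"
    check = "check"
    custom = "custom"


CONSTRAINT_SUPPORT = {
    ConstraintType.not_null: "enforced",
    ConstraintType.check: "not_supported",
}


def render_constraint(kind: ConstraintType, render: Callable[[], str]) -> Optional[str]:
    # custom constraints skip the support-map lookup entirely and are
    # rendered exactly as the user wrote them
    if kind is ConstraintType.custom:
        return render()
    if CONSTRAINT_SUPPORT[kind] == "not_supported":
        return None  # the real adapter warns and skips here
    return render()


print(render_constraint(ConstraintType.custom, lambda: "check (amount > 0)"))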
@@ -1633,10 +1652,12 @@ join diff_count using (id)


  def catch_as_completed(
- futures, # typing: List[Future[agate.Table]]
- ) -> Tuple[agate.Table, List[Exception]]:
- # catalogs: agate.Table = agate.Table(rows=[])
- tables: List[agate.Table] = []
+ futures, # typing: List[Future["agate.Table"]]
+ ) -> Tuple["agate.Table", List[Exception]]:
+ from dbt_common.clients.agate_helper import merge_tables
+
+ # catalogs: "agate.Table" = agate.Table(rows=[])
+ tables: List["agate.Table"] = []
  exceptions: List[Exception] = []

  for future in as_completed(futures):
dbt/adapters/base/relation.py CHANGED
@@ -52,13 +52,13 @@ class BaseRelation(FakeAPIObject, Hashable):
  # adding a relation type here also requires defining the associated rename macro
  # e.g. adding RelationType.View in dbt-postgres requires that you define:
  # include/postgres/macros/relations/view/rename.sql::postgres__get_rename_view_sql()
- renameable_relations: SerializableIterable = ()
+ renameable_relations: SerializableIterable = field(default_factory=frozenset)

  # register relation types that are atomically replaceable, e.g. they have "create or replace" syntax
  # adding a relation type here also requires defining the associated replace macro
  # e.g. adding RelationType.View in dbt-postgres requires that you define:
  # include/postgres/macros/relations/view/replace.sql::postgres__get_replace_view_sql()
- replaceable_relations: SerializableIterable = ()
+ replaceable_relations: SerializableIterable = field(default_factory=frozenset)

  def _is_exactish_match(self, field: ComponentName, value: str) -> bool:
  if self.dbt_created and self.quote_policy.get_part(field) is False:
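
Replacing the shared `()` literal with `field(default_factory=frozenset)` gives each `BaseRelation` instance a freshly constructed empty frozenset and keeps the attribute a regular dataclass field that subclasses (for example dbt-postgres, per the comments above) can override with their own set of relation types. The general dataclass pattern, in isolation (types simplified to strings for the sketch):

from dataclasses import dataclass, field
from typing import FrozenSet


@dataclass(frozen=True)
class RelationDefaults:
    # default_factory runs per instance, so every object gets its own
    # empty frozenset instead of sharing one module-level literal
    renameable_relations: FrozenSet[str] = field(default_factory=frozenset)
    replaceable_relations: FrozenSet[str] = field(default_factory=frozenset)


r = RelationDefaults()
print("view" in r.renameable_relations)  # False until a subclass supplies values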
dbt/adapters/contracts/relation.py CHANGED
@@ -1,7 +1,11 @@
+ from abc import ABC
+
  from collections.abc import Mapping
  from dataclasses import dataclass
- from typing import Dict, Optional
+ from typing import Dict, Optional, Any, Union, List
+

+ from dbt_common.contracts.config.materialization import OnConfigurationChangeOption
  from dbt_common.contracts.util import Replaceable
  from dbt_common.dataclass_schema import StrEnum, dbtClassMixin
  from dbt_common.exceptions import CompilationError, DataclassNotDictError
@@ -18,13 +22,43 @@ class RelationType(StrEnum):
  Ephemeral = "ephemeral"


+ class MaterializationContract(Protocol):
+ enforced: bool
+ alias_types: bool
+
+
+ class MaterializationConfig(Mapping, ABC):
+ materialized: str
+ incremental_strategy: Optional[str]
+ persist_docs: Dict[str, Any]
+ column_types: Dict[str, Any]
+ full_refresh: Optional[bool]
+ quoting: Dict[str, Any]
+ unique_key: Union[str, List[str], None]
+ on_schema_change: Optional[str]
+ on_configuration_change: OnConfigurationChangeOption
+ contract: MaterializationContract
+ extra: Dict[str, Any]
+
+ def __contains__(self, item):
+ ...
+
+ def __delitem__(self, key):
+ ...
+
+
  class RelationConfig(Protocol):
+ resource_type: str
  name: str
+ description: str
  database: str
  schema: str
  identifier: str
+ compiled_code: Optional[str]
+ meta: Dict[str, Any]
+ tags: List[str]
  quoting_dict: Dict[str, bool]
- config: Dict[str, str]
+ config: Optional[MaterializationConfig]


  class ComponentName(StrEnum):
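
`RelationConfig` grows from a thin name/database/schema protocol into a richer structural type that also exposes `resource_type`, `description`, `compiled_code`, `meta`, `tags`, and an optional `MaterializationConfig`; any object with matching attributes satisfies it, with no inheritance required. A minimal structural-typing sketch (the `ExampleNode` class and `describe` helper are purely illustrative):

from typing import Any, Dict, List

from typing_extensions import Protocol


class RelationConfigLike(Protocol):
    # a trimmed-down stand-in for the expanded dbt protocol
    resource_type: str
    name: str
    database: str
    schema: str
    identifier: str
    meta: Dict[str, Any]
    tags: List[str]


class ExampleNode:
    # satisfies RelationConfigLike structurally, without subclassing it
    resource_type = "model"
    name = "orders"
    database = "analytics"
    schema = "prod"
    identifier = "orders"
    meta: Dict[str, Any] = {}
    tags = ["daily"]


def describe(cfg: RelationConfigLike) -> str:
    return f"{cfg.database}.{cfg.schema}.{cfg.identifier} ({cfg.resource_type})"


print(describe(ExampleNode()))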
dbt/adapters/factory.py CHANGED
@@ -100,9 +100,7 @@ class AdapterContainer:
  adapter_name = config.credentials.type
  adapter_type = self.get_adapter_class_by_name(adapter_name)
  adapter_version = self._adapter_version(adapter_name)
- fire_event(
- AdapterRegistered(adapter_name=adapter_name, adapter_version=adapter_version)
- )
+ fire_event(AdapterRegistered(adapter_name=adapter_name, adapter_version=adapter_version))
  with self.lock:
  if adapter_name in self.adapters:
  # this shouldn't really happen...
dbt/adapters/protocol.py CHANGED
@@ -10,10 +10,10 @@ from typing import (
  Type,
  TypeVar,
  Tuple,
+ TYPE_CHECKING,
  )
  from typing_extensions import Protocol

- import agate
  from dbt_common.clients.jinja import MacroProtocol
  from dbt_common.contracts.config.base import BaseConfig

@@ -25,6 +25,9 @@ from dbt.adapters.contracts.connection import (
  from dbt.adapters.contracts.macros import MacroResolverProtocol
  from dbt.adapters.contracts.relation import HasQuoting, Policy, RelationConfig

+ if TYPE_CHECKING:
+ import agate
+

  @dataclass
  class AdapterConfig(BaseConfig):
@@ -169,5 +172,5 @@ class AdapterProtocol( # type: ignore[misc]

  def execute(
  self, sql: str, auto_begin: bool = False, fetch: bool = False
- ) -> Tuple[AdapterResponse, agate.Table]:
+ ) -> Tuple[AdapterResponse, "agate.Table"]:
  ...
dbt/adapters/relation_configs/config_base.py CHANGED
@@ -1,9 +1,11 @@
  from dataclasses import dataclass
- from typing import Dict, Union
+ from typing import Dict, Union, TYPE_CHECKING

- import agate
  from dbt_common.utils import filter_null_values

+ if TYPE_CHECKING:
+ import agate
+

  """
  This is what relation metadata from the database looks like. It's a dictionary because there will be
@@ -18,7 +20,7 @@ like name. But it also can have multiple indexes, which needs to be a separate q
  ])
  }
  """
- RelationResults = Dict[str, Union[agate.Row, agate.Table]]
+ RelationResults = Dict[str, Union["agate.Row", "agate.Table"]]


  @dataclass(frozen=True)
dbt/adapters/sql/connections.py CHANGED
@@ -1,9 +1,7 @@
  import abc
  import time
- from typing import Any, Dict, Iterable, List, Optional, Tuple
+ from typing import Any, Dict, Iterable, List, Optional, Tuple, TYPE_CHECKING

- import agate
- from dbt_common.clients.agate_helper import empty_table, table_from_data_flat
  from dbt_common.events.contextvars import get_node_info
  from dbt_common.events.functions import fire_event
  from dbt_common.exceptions import DbtInternalError, NotImplementedError
@@ -22,6 +20,9 @@ from dbt.adapters.events.types import (
  SQLQueryStatus,
  )

+ if TYPE_CHECKING:
+ import agate
+

  class SQLConnectionManager(BaseConnectionManager):
  """The default connection manager with some common SQL methods implemented.
@@ -126,7 +127,9 @@ class SQLConnectionManager(BaseConnectionManager):
  return [dict(zip(column_names, row)) for row in rows]

  @classmethod
- def get_result_from_cursor(cls, cursor: Any, limit: Optional[int]) -> agate.Table:
+ def get_result_from_cursor(cls, cursor: Any, limit: Optional[int]) -> "agate.Table":
+ from dbt_common.clients.agate_helper import table_from_data_flat
+
  data: List[Any] = []
  column_names: List[str] = []

@@ -146,7 +149,9 @@ class SQLConnectionManager(BaseConnectionManager):
  auto_begin: bool = False,
  fetch: bool = False,
  limit: Optional[int] = None,
- ) -> Tuple[AdapterResponse, agate.Table]:
+ ) -> Tuple[AdapterResponse, "agate.Table"]:
+ from dbt_common.clients.agate_helper import empty_table
+
  sql = self._add_query_comment(sql)
  _, cursor = self.add_query(sql, auto_begin)
  response = self.get_response(cursor)
dbt/adapters/sql/impl.py CHANGED
@@ -1,6 +1,5 @@
- from typing import Any, List, Optional, Tuple, Type
+ from typing import Any, List, Optional, Tuple, Type, TYPE_CHECKING

- import agate
  from dbt_common.events.functions import fire_event

  from dbt.adapters.base import BaseAdapter, BaseRelation, available
@@ -23,6 +22,9 @@ DROP_RELATION_MACRO_NAME = "drop_relation"
  ALTER_COLUMN_TYPE_MACRO_NAME = "alter_column_type"
  VALIDATE_SQL_MACRO_NAME = "validate_sql"

+ if TYPE_CHECKING:
+ import agate
+

  class SQLAdapter(BaseAdapter):
  """The default adapter with the common agate conversions and some SQL
@@ -65,33 +67,35 @@ class SQLAdapter(BaseAdapter):
  return self.connections.add_query(sql, auto_begin, bindings, abridge_sql_log)

  @classmethod
- def convert_text_type(cls, agate_table: agate.Table, col_idx: int) -> str:
+ def convert_text_type(cls, agate_table: "agate.Table", col_idx: int) -> str:
  return "text"

  @classmethod
- def convert_number_type(cls, agate_table: agate.Table, col_idx: int) -> str:
+ def convert_number_type(cls, agate_table: "agate.Table", col_idx: int) -> str:
+ import agate
+
  # TODO CT-211
  decimals = agate_table.aggregate(agate.MaxPrecision(col_idx)) # type: ignore[attr-defined]
  return "float8" if decimals else "integer"

  @classmethod
- def convert_integer_type(cls, agate_table: agate.Table, col_idx: int) -> str:
+ def convert_integer_type(cls, agate_table: "agate.Table", col_idx: int) -> str:
  return "integer"

  @classmethod
- def convert_boolean_type(cls, agate_table: agate.Table, col_idx: int) -> str:
+ def convert_boolean_type(cls, agate_table: "agate.Table", col_idx: int) -> str:
  return "boolean"

  @classmethod
- def convert_datetime_type(cls, agate_table: agate.Table, col_idx: int) -> str:
+ def convert_datetime_type(cls, agate_table: "agate.Table", col_idx: int) -> str:
  return "timestamp without time zone"

  @classmethod
- def convert_date_type(cls, agate_table: agate.Table, col_idx: int) -> str:
+ def convert_date_type(cls, agate_table: "agate.Table", col_idx: int) -> str:
  return "date"

  @classmethod
- def convert_time_type(cls, agate_table: agate.Table, col_idx: int) -> str:
+ def convert_time_type(cls, agate_table: "agate.Table", col_idx: int) -> str:
  return "time"

  @classmethod
dbt/include/__init__.py CHANGED
@@ -1,3 +1,3 @@
  from pkgutil import extend_path

- __path__ = extend_path(__path__, __name__)
+ __path__ = extend_path(__path__, __name__)
dbt/include/global_project/macros/materializations/tests/helpers.sql CHANGED
@@ -22,17 +22,17 @@

  {% macro default__get_unit_test_sql(main_sql, expected_fixture_sql, expected_column_names) -%}
  -- Build actual result given inputs
- with dbt_internal_unit_test_actual AS (
+ with dbt_internal_unit_test_actual as (
  select
- {% for expected_column_name in expected_column_names %}{{expected_column_name}}{% if not loop.last -%},{% endif %}{%- endfor -%}, {{ dbt.string_literal("actual") }} as actual_or_expected
+ {% for expected_column_name in expected_column_names %}{{expected_column_name}}{% if not loop.last -%},{% endif %}{%- endfor -%}, {{ dbt.string_literal("actual") }} as {{ adapter.quote("actual_or_expected") }}
  from (
  {{ main_sql }}
  ) _dbt_internal_unit_test_actual
  ),
  -- Build expected result
- dbt_internal_unit_test_expected AS (
+ dbt_internal_unit_test_expected as (
  select
- {% for expected_column_name in expected_column_names %}{{expected_column_name}}{% if not loop.last -%}, {% endif %}{%- endfor -%}, {{ dbt.string_literal("expected") }} as actual_or_expected
+ {% for expected_column_name in expected_column_names %}{{expected_column_name}}{% if not loop.last -%}, {% endif %}{%- endfor -%}, {{ dbt.string_literal("expected") }} as {{ adapter.quote("actual_or_expected") }}
  from (
  {{ expected_fixture_sql }}
  ) _dbt_internal_unit_test_expected
dbt/include/global_project/macros/materializations/tests/unit.sql CHANGED
@@ -11,7 +11,7 @@
  {%- set columns_in_relation = adapter.get_columns_in_relation(temp_relation) -%}
  {%- set column_name_to_data_types = {} -%}
  {%- for column in columns_in_relation -%}
- {%- do column_name_to_data_types.update({column.name: column.dtype}) -%}
+ {%- do column_name_to_data_types.update({column.name|lower: column.data_type}) -%}
  {%- endfor -%}

  {% set unit_test_sql = get_unit_test_sql(sql, get_expected_sql(expected_rows, column_name_to_data_types), tested_expected_column_names) %}
dbt/include/global_project/macros/relations/materialized_view/drop.sql CHANGED
@@ -5,7 +5,7 @@ actually executes the drop, and `get_drop_sql`, which returns the template.
  */ #}

  {% macro drop_materialized_view(relation) -%}
- {{ return(adapter.dispatch('drop_materialized_view', 'dbt')(relation)) }}
+ {{- adapter.dispatch('drop_materialized_view', 'dbt')(relation) -}}
  {%- endmacro %}


dbt/include/global_project/macros/relations/table/drop.sql CHANGED
@@ -5,7 +5,7 @@ actually executes the drop, and `get_drop_sql`, which returns the template.
  */ #}

  {% macro drop_table(relation) -%}
- {{ return(adapter.dispatch('drop_table', 'dbt')(relation)) }}
+ {{- adapter.dispatch('drop_table', 'dbt')(relation) -}}
  {%- endmacro %}


dbt/include/global_project/macros/relations/view/drop.sql CHANGED
@@ -5,7 +5,7 @@ actually executes the drop, and `get_drop_sql`, which returns the template.
  */ #}

  {% macro drop_view(relation) -%}
- {{ return(adapter.dispatch('drop_view', 'dbt')(relation)) }}
+ {{- adapter.dispatch('drop_view', 'dbt')(relation) -}}
  {%- endmacro %}


dbt/include/global_project/macros/unit_test_sql/get_fixture_sql.sql CHANGED
@@ -3,10 +3,14 @@
  {% set default_row = {} %}

  {%- if not column_name_to_data_types -%}
- {%- set columns_in_relation = adapter.get_columns_in_relation(this) -%}
+ {#-- Use defer_relation IFF it is available in the manifest and 'this' is missing from the database --#}
+ {%- set this_or_defer_relation = defer_relation if (defer_relation and not load_relation(this)) else this -%}
+ {%- set columns_in_relation = adapter.get_columns_in_relation(this_or_defer_relation) -%}
+
  {%- set column_name_to_data_types = {} -%}
  {%- for column in columns_in_relation -%}
- {%- do column_name_to_data_types.update({column.name: column.dtype}) -%}
+ {#-- This needs to be a case-insensitive comparison --#}
+ {%- do column_name_to_data_types.update({column.name|lower: column.data_type}) -%}
  {%- endfor -%}
  {%- endif -%}

@@ -18,12 +22,13 @@
  {%- do default_row.update({column_name: (safe_cast("null", column_type) | trim )}) -%}
  {%- endfor -%}

+
  {%- for row in rows -%}
- {%- do format_row(row, column_name_to_data_types) -%}
+ {%- set formatted_row = format_row(row, column_name_to_data_types) -%}
  {%- set default_row_copy = default_row.copy() -%}
- {%- do default_row_copy.update(row) -%}
+ {%- do default_row_copy.update(formatted_row) -%}
  select
- {%- for column_name, column_value in default_row_copy.items() %} {{ column_value }} AS {{ column_name }}{% if not loop.last -%}, {%- endif %}
+ {%- for column_name, column_value in default_row_copy.items() %} {{ column_value }} as {{ column_name }}{% if not loop.last -%}, {%- endif %}
  {%- endfor %}
  {%- if not loop.last %}
  union all
@@ -32,7 +37,7 @@ union all

  {%- if (rows | length) == 0 -%}
  select
- {%- for column_name, column_value in default_row.items() %} {{ column_value }} AS {{ column_name }}{% if not loop.last -%},{%- endif %}
+ {%- for column_name, column_value in default_row.items() %} {{ column_value }} as {{ column_name }}{% if not loop.last -%},{%- endif %}
  {%- endfor %}
  limit 0
  {%- endif -%}
@@ -42,13 +47,13 @@ union all
  {% macro get_expected_sql(rows, column_name_to_data_types) %}

  {%- if (rows | length) == 0 -%}
- select * FROM dbt_internal_unit_test_actual
+ select * from dbt_internal_unit_test_actual
  limit 0
  {%- else -%}
  {%- for row in rows -%}
- {%- do format_row(row, column_name_to_data_types) -%}
+ {%- set formatted_row = format_row(row, column_name_to_data_types) -%}
  select
- {%- for column_name, column_value in row.items() %} {{ column_value }} AS {{ column_name }}{% if not loop.last -%}, {%- endif %}
+ {%- for column_name, column_value in formatted_row.items() %} {{ column_value }} as {{ column_name }}{% if not loop.last -%}, {%- endif %}
  {%- endfor %}
  {%- if not loop.last %}
  union all
@@ -59,18 +64,32 @@
  {% endmacro %}

  {%- macro format_row(row, column_name_to_data_types) -%}
+ {#-- generate case-insensitive formatted row --#}
+ {% set formatted_row = {} %}
+ {%- for column_name, column_value in row.items() -%}
+ {% set column_name = column_name|lower %}

- {#-- wrap yaml strings in quotes, apply cast --#}
- {%- for column_name, column_value in row.items() -%}
- {% set row_update = {column_name: column_value} %}
- {%- if column_value is string -%}
- {%- set row_update = {column_name: safe_cast(dbt.string_literal(column_value), column_name_to_data_types[column_name]) } -%}
- {%- elif column_value is none -%}
- {%- set row_update = {column_name: safe_cast('null', column_name_to_data_types[column_name]) } -%}
- {%- else -%}
- {%- set row_update = {column_name: safe_cast(column_value, column_name_to_data_types[column_name]) } -%}
- {%- endif -%}
- {%- do row.update(row_update) -%}
- {%- endfor -%}
+ {%- if column_name not in column_name_to_data_types %}
+ {#-- if user-provided row contains column name that relation does not contain, raise an error --#}
+ {% set fixture_name = "expected output" if model.resource_type == 'unit_test' else ("'" ~ model.name ~ "'") %}
+ {{ exceptions.raise_compiler_error(
+ "Invalid column name: '" ~ column_name ~ "' in unit test fixture for " ~ fixture_name ~ "."
+ "\nAccepted columns for " ~ fixture_name ~ " are: " ~ (column_name_to_data_types.keys()|list)
+ ) }}
+ {%- endif -%}
+
+ {%- set column_type = column_name_to_data_types[column_name] %}
+
+ {#-- sanitize column_value: wrap yaml strings in quotes, apply cast --#}
+ {%- set column_value_clean = column_value -%}
+ {%- if column_value is string -%}
+ {%- set column_value_clean = dbt.string_literal(dbt.escape_single_quotes(column_value)) -%}
+ {%- elif column_value is none -%}
+ {%- set column_value_clean = 'null' -%}
+ {%- endif -%}

+ {%- set row_update = {column_name: safe_cast(column_value_clean, column_type) } -%}
+ {%- do formatted_row.update(row_update) -%}
+ {%- endfor -%}
+ {{ return(formatted_row) }}
  {%- endmacro -%}
dbt/include/global_project/macros/utils/cast.sql ADDED
@@ -0,0 +1,7 @@
+ {% macro cast(field, type) %}
+ {{ return(adapter.dispatch('cast', 'dbt') (field, type)) }}
+ {% endmacro %}
+
+ {% macro default__cast(field, type) %}
+ cast({{field}} as {{type}})
+ {% endmacro %}
dbt_adapters-1.0.0.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
- Metadata-Version: 2.1
+ Metadata-Version: 2.3
  Name: dbt-adapters
- Version: 0.1.0a5
+ Version: 1.0.0
  Summary: The set of adapter protocols and base functionality that supports integration with dbt-core
  Project-URL: Homepage, https://github.com/dbt-labs/dbt-adapters
  Project-URL: Documentation, https://docs.getdbt.com
@@ -21,29 +21,12 @@ Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Requires-Python: >=3.8.0
- Requires-Dist: agate<2.0
- Requires-Dist: dbt-common<1.0
- Requires-Dist: mashumaro[msgpack]<4.0
- Requires-Dist: protobuf<5.0
+ Requires-Dist: agate<2.0,>=1.0
+ Requires-Dist: dbt-common<2.0
+ Requires-Dist: mashumaro[msgpack]<4.0,>=3.0
+ Requires-Dist: protobuf<5.0,>=3.0
  Requires-Dist: pytz>=2015.7
- Requires-Dist: typing-extensions<5.0
- Provides-Extra: build
- Requires-Dist: check-wheel-contents; extra == 'build'
- Requires-Dist: twine; extra == 'build'
- Requires-Dist: wheel; extra == 'build'
- Provides-Extra: lint
- Requires-Dist: black; extra == 'lint'
- Requires-Dist: flake8; extra == 'lint'
- Requires-Dist: flake8-pyproject; extra == 'lint'
- Provides-Extra: test
- Requires-Dist: pytest; extra == 'test'
- Requires-Dist: pytest-dotenv; extra == 'test'
- Requires-Dist: pytest-xdist; extra == 'test'
- Provides-Extra: typecheck
- Requires-Dist: mypy; extra == 'typecheck'
- Requires-Dist: types-protobuf; extra == 'typecheck'
- Requires-Dist: types-pytz; extra == 'typecheck'
- Requires-Dist: types-pyyaml; extra == 'typecheck'
+ Requires-Dist: typing-extensions<5.0,>=4.0
  Description-Content-Type: text/markdown

  <p align="center">
dbt_adapters-1.0.0.dist-info/RECORD CHANGED
@@ -1,28 +1,28 @@
  dbt/__init__.py,sha256=iY4jdvOxcDhkdr5FiyOTZPHadKtMZDQ-qC6Fw6_EHPM,277
- dbt/adapters/__about__.py,sha256=oK81lI4KRkN8gJdIrdjuVkNUAW-U8XhHa6Ad_Ug4oqA,20
+ dbt/adapters/__about__.py,sha256=4se2-QRIPsQy0Qla6DZQFQ90RVImEWlZaaA4AT2r29U,18
  dbt/adapters/__init__.py,sha256=5Cy35DPhUOt8EdFO-2dplHygsmUVONNwrCtGfUPApQA,251
  dbt/adapters/cache.py,sha256=WGy4ewnz-J13LverTACBW2iFhGswrWLgm-wiBrQnMzo,20084
  dbt/adapters/capability.py,sha256=MZgzXipXQt12lYSTqa_lF9RBbEN8cDz_YIG7iBvC9W4,1952
- dbt/adapters/factory.py,sha256=S8CShdkpTfClnvmzuIriKU19wewROWRYtoaIKhfwOVA,9043
- dbt/adapters/protocol.py,sha256=XdYLQE-KCLnhA3AwS0vgjqkTLAa-e3rzaD5FXXJleU0,3982
+ dbt/adapters/factory.py,sha256=gKguDR9ArsPLA-LZdFdpal0zOTJaQcEVnMD0OslkQ5M,9021
+ dbt/adapters/protocol.py,sha256=il8qcg78mXc3kHyVO0FPFwTkrq_cjQO69BuR7KGBU3I,4026
  dbt/adapters/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  dbt/adapters/reference_keys.py,sha256=lRN3gPdQD6Qciy-BAGx_rz3CFlbS7zMSZ43pZ_9ondE,1046
  dbt/adapters/utils.py,sha256=OtakbxPgxwrxN5Yd2vAO-cvLETSgzBwMWebhgegAVyA,2414
  dbt/adapters/base/README.md,sha256=muHQntC07Lh6L1XfVgwKhV5RltOPBLYPdQqd8_7l34c,516
  dbt/adapters/base/__init__.py,sha256=KGGGbj8jGMjAFJdQ5YHcOpApMMVZ_6Xuni1swhpkqRY,423
  dbt/adapters/base/column.py,sha256=M3iotEY5yi4xikXyXzD9oshBF9-xcJrIeQVu1sB85DI,5450
- dbt/adapters/base/connections.py,sha256=8Yxd3yIWDAVf-WJBz4TAJfNRTL29uzotJpmb_9zCcdU,16907
- dbt/adapters/base/impl.py,sha256=NFZ8jdvUelhebHexM1fhTwmK8Tj4sLEitRB8uSwVxl0,63591
+ dbt/adapters/base/connections.py,sha256=Poll7ofPUw16MafuzhZbSZ4DIVbiUC5H69OB5_Surs4,16921
+ dbt/adapters/base/impl.py,sha256=DC0Up4ch0aj3eBVS1vVC3kIdudWjgwBJExhF9Z9t-BU,64403
  dbt/adapters/base/meta.py,sha256=MMqL2xBqdvoacNs9JcL0E38NZIhCP4RH4OD_z_jo7GQ,4644
  dbt/adapters/base/plugin.py,sha256=rm0GjNHnWM2mn0GJOjciZLwn-02xlzWCoMT9u-epwP0,1076
  dbt/adapters/base/query_headers.py,sha256=UluGd9IYCYkoMiDi5Yx_lnrCOSjWppjwRro4SIGgx8I,3496
- dbt/adapters/base/relation.py,sha256=QK7fLndm5Y2KMtLTJNQYVhLbweXL_P7k7YqjX87dlnY,15582
+ dbt/adapters/base/relation.py,sha256=vCylzcBRi3aW-vp-731pmsCTQJoSTXxjR-lS9DPjxHo,15642
  dbt/adapters/clients/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  dbt/adapters/clients/jinja.py,sha256=NsZOiBpOLunS46hRL5OcX1MpY3Ih6_87Vgz4qd_PNbc,768
  dbt/adapters/contracts/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  dbt/adapters/contracts/connection.py,sha256=F1Lw75trgYUX_FZOYzYguJgOzO8TPT4qnwP3gzm2aeY,6860
  dbt/adapters/contracts/macros.py,sha256=NYVDi5ww7v4ksKBwF836TXE-2xU4IBaUINqvxMY-ieU,366
- dbt/adapters/contracts/relation.py,sha256=wDO3lbgiDvbqg80UwQSpwi9RJVj1DnmoLs19kKpGyLk,3778
+ dbt/adapters/contracts/relation.py,sha256=bow0ijWEMlcNpsd7HmHQlDNJ1kB8NpAs_2i5LCBK-P0,4652
  dbt/adapters/events/README.md,sha256=5Vd6xD9HlwYMknUeZVtBa06tiM2Lvn6abiYvjdIV9wc,3522
  dbt/adapters/events/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  dbt/adapters/events/adapter_types.proto,sha256=YKawfGW-2OscpEIMoH3wNrdAUUN4U7p3d1tOoK0Jg-M,9484
@@ -38,13 +38,13 @@ dbt/adapters/exceptions/connection.py,sha256=x82j2Ix242Slm6Ima8Tol3GLOB9yZYH5lq6
  dbt/adapters/exceptions/database.py,sha256=nIXJdQyPQOZaiKvCkQ3MoKlKOiaN58rtDZflw3CSkug,1618
  dbt/adapters/relation_configs/README.md,sha256=BIRqn7gUwIHzDlFueD2eq6HVAjd6gN7vGTU3hG4looo,1810
  dbt/adapters/relation_configs/__init__.py,sha256=Il1HHEI8HJGHEi2B8qsgv_CoNA2STO7SULDi78fQwZg,354
- dbt/adapters/relation_configs/config_base.py,sha256=v4uZtdpB9NTJc0qpoLx5Jf5DSy0yOSVlpLA5p0PZozU,1714
+ dbt/adapters/relation_configs/config_base.py,sha256=IK9oKf9TuOTLIiKX8ms_X-p4yxZvPAlM7qg94mozvrA,1756
  dbt/adapters/relation_configs/config_change.py,sha256=0yF-wRmXgxydDXCErK2waz5eY2aknLs5dVBLw2u4CCo,697
  dbt/adapters/relation_configs/config_validation.py,sha256=wlJUMwOEPhYFch-LRtEWfLNJMq8jL1tRhOUHmNX8nFw,1978
  dbt/adapters/sql/__init__.py,sha256=WLWZJfqc8pr1N1BMVe9gM-KQ4URJIeKfLqTuJBD1VN0,107
- dbt/adapters/sql/connections.py,sha256=ryCjRu2s48P_dk_GrTvMTGizfBlKQ5fByJD7ThwkwkU,6462
- dbt/adapters/sql/impl.py,sha256=MCSv0vvgWzmxVGeQJBU27IBuV86OUBwGrwqEm5LUS5I,10648
- dbt/include/__init__.py,sha256=dByFIWyaplDqvXUB4hdVAqSR5A9VLc6wQAYlBeQRw2Q,75
+ dbt/adapters/sql/connections.py,sha256=aRZGkiMYwfuA7hR08LOVDzDLbBQP05KUVIHOCuU0KOU,6565
+ dbt/adapters/sql/impl.py,sha256=DNhNcv7kMrPYn2ZviRbj23z77_SUFBbqd4HYmRJ7tGk,10722
+ dbt/include/__init__.py,sha256=qEFeq3yuf3lQKVseALmL8aPM8fpCS54B_5pry00M3hk,76
  dbt/include/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  dbt/include/global_project/__init__.py,sha256=-0HL5OkeJSrxglm1Y-UltTiBPY2BbWx8ZpTiJ7ypSvw,73
  dbt/include/global_project/dbt_project.yml,sha256=RTtOhnBpEL0gbd1GlpxuVr6eZJBPvgWfNw-F88sKQ-w,109
@@ -89,9 +89,9 @@ dbt/include/global_project/macros/materializations/snapshots/helpers.sql,sha256=
  dbt/include/global_project/macros/materializations/snapshots/snapshot.sql,sha256=q-Uaz9B2070fpruz5HEJiCqPUJNgXl7dsM5a0_v0fkg,3680
  dbt/include/global_project/macros/materializations/snapshots/snapshot_merge.sql,sha256=Ik3OyDqqkt0z4lrNUvMKYVmZxqAdtaN1FvtMgg0VF6g,849
  dbt/include/global_project/macros/materializations/snapshots/strategies.sql,sha256=ahWDMnD-Q_fTGKSjvm5ZwvypmNC6BDVguk-LNk-nHhU,6286
- dbt/include/global_project/macros/materializations/tests/helpers.sql,sha256=gzcZjwCAwBAHnJW7Ld0ESVmVtQZH4M4lLLWr1fv91FE,1684
+ dbt/include/global_project/macros/materializations/tests/helpers.sql,sha256=tuiLwdkruULYzFJ8vSJTg5hG89VYh-iCCP3NzJw1KbM,1730
  dbt/include/global_project/macros/materializations/tests/test.sql,sha256=Rz3O_3dWHlIofG3d2CwsP2bXFimRZUIwOevyB0iz1J4,1831
- dbt/include/global_project/macros/materializations/tests/unit.sql,sha256=Y_48B4ExQb35DU4_jxXVaHteDUf3eew3b1xcesJX7Q0,1102
+ dbt/include/global_project/macros/materializations/tests/unit.sql,sha256=pSfK8BsG5NAqNtqavC5gM1asFRbBkm6U7-sJPTZPa14,1112
  dbt/include/global_project/macros/materializations/tests/where_subquery.sql,sha256=xjuYd18tXo99OReJGQsfgEPYljUUyF00XzK4h0SJjdM,497
  dbt/include/global_project/macros/python_model/python.sql,sha256=OKy-IwnklJPXfjXinVwlS9_enmXKQWoibnLOZRNR28U,3466
  dbt/include/global_project/macros/relations/create.sql,sha256=99LLak1bhlhRw7yiI0c_4CKPlGyzqPBeBYBNeBPSmDo,701
@@ -106,24 +106,25 @@ dbt/include/global_project/macros/relations/schema.sql,sha256=kOQeEZQwycGGtAoq_K
  dbt/include/global_project/macros/relations/column/columns_spec_ddl.sql,sha256=ukW4iLuAXYfsrnlfeY26cFMMFxATcNV8hlp9valOx8U,3676
  dbt/include/global_project/macros/relations/materialized_view/alter.sql,sha256=pZcZa1xfcZZpVVSvvJ3YR0zn6noIKBfkTSbrqKohAcU,1806
  dbt/include/global_project/macros/relations/materialized_view/create.sql,sha256=C8BpyEhxETU3N46I4WNoCwB0fTb3aOhJj6EfTkNoTd0,400
- dbt/include/global_project/macros/relations/materialized_view/drop.sql,sha256=TjcfI9J76M98koSo4JcCRuNJfu8jx4i1oYOfxgsP7ZI,546
+ dbt/include/global_project/macros/relations/materialized_view/drop.sql,sha256=tYCwNlcoH8RmqkIumioO7nmhWWAVcwDIfly5Z7PGh3g,540
  dbt/include/global_project/macros/relations/materialized_view/refresh.sql,sha256=tTHxhzHp8mLziJgzTm7nkooC5-fAlsClOf_9-kMR794,380
  dbt/include/global_project/macros/relations/materialized_view/rename.sql,sha256=E1IQoaocaV2bt-vAYwBwAevSRaSsRisCZBY-l9dk_-Y,400
  dbt/include/global_project/macros/relations/materialized_view/replace.sql,sha256=WxbFchYzHVulEhdPa0crFoID3GR17Hw_mFdcyGKs7f0,389
  dbt/include/global_project/macros/relations/table/create.sql,sha256=-IKYB4GC1_YmGIiJ-AKUkfvq8UCvd5Exe91ojt0r41o,2140
- dbt/include/global_project/macros/relations/table/drop.sql,sha256=yegi7DCqksrdM4lJMq1NY4_t3y9UvrdQWQ123z85zj0,498
+ dbt/include/global_project/macros/relations/table/drop.sql,sha256=SvXZX3OLNB8oGYmfZ83eHsIWiOLyyuS9Po6Dr6g4oa8,492
  dbt/include/global_project/macros/relations/table/rename.sql,sha256=GMmz83Sius6Y3fPdlnjMYXrVRDP8INO6tLAn3vfgzYI,352
  dbt/include/global_project/macros/relations/table/replace.sql,sha256=xlTB2pI_fEkAPJdmbrirSrnzvFYTPtz9ROMCMxANFCo,341
  dbt/include/global_project/macros/relations/view/create.sql,sha256=FkLYXnCPj2HLCbtN47KR45L6hFxqPiBLcSPCfs0v2YU,839
- dbt/include/global_project/macros/relations/view/drop.sql,sha256=bZt-JH7oYWpAo0ezVEIleCePiY-g5mhKT2jCVo0iYpk,494
+ dbt/include/global_project/macros/relations/view/drop.sql,sha256=WszUTZrkd93_OCEha4OuRWyCucqxGRTm07Zvn25RHXs,488
  dbt/include/global_project/macros/relations/view/rename.sql,sha256=P4hpxlrR0wiBTZFJ8N3GyUUbqgKgMfgzUUbIWw8fui0,348
  dbt/include/global_project/macros/relations/view/replace.sql,sha256=5_Lky7KUixyYOOOahooD0VnmHOiOVqmxrI0ihwRjX08,2584
- dbt/include/global_project/macros/unit_test_sql/get_fixture_sql.sql,sha256=BXXjimZ8PuBQBkkwBGUmUwcOaJ2xyO4DrNExzgPAOGg,2640
+ dbt/include/global_project/macros/unit_test_sql/get_fixture_sql.sql,sha256=Woe20TPF54pd6RN6C1wi3BxNQ8wo1sS2JZtdWoEtuCk,3841
  dbt/include/global_project/macros/utils/any_value.sql,sha256=leK-fCUhDNt6MFkGofafYjv-0LtL0fkb3sJXe-aIorU,213
  dbt/include/global_project/macros/utils/array_append.sql,sha256=XsC-kchlWxVwc-_1CoBs1RkGYt8qsOAVbq5JlsV2WIc,357
  dbt/include/global_project/macros/utils/array_concat.sql,sha256=0c4w5kP1N_9BY-wppx1OBCCIDOsC1HhimkSDghjjx2Y,248
  dbt/include/global_project/macros/utils/array_construct.sql,sha256=BIA7tiLvXDzTvnuRq_F2VIjkotV8hxUOq1tD4reWSUU,461
  dbt/include/global_project/macros/utils/bool_or.sql,sha256=oin0FkG9cbS81eRIHwKYHkldG15HWBVkkFwVBcCAXkc,205
+ dbt/include/global_project/macros/utils/cast.sql,sha256=YQRXZEIvrXDDs4sqvFCJk7a9xniugjRG5YWsAid1FyI,194
  dbt/include/global_project/macros/utils/cast_bool_to_text.sql,sha256=fOIW7AM7_BJIHU5GnhwMYGghh8mvkc27_sqdW0rdszQ,242
  dbt/include/global_project/macros/utils/concat.sql,sha256=qHrVhra5QSwBskYYCpaeJvsCAIFZ_eyeF4h3kgjs8B0,186
  dbt/include/global_project/macros/utils/data_types.sql,sha256=Rw6xhK02NB9TlKolqyGcUGoWgHBkKPpJ1Xh3FOBxrMc,4416
@@ -146,7 +147,7 @@ dbt/include/global_project/macros/utils/right.sql,sha256=EwNG98CAFIwNDmarwopf7Rk
  dbt/include/global_project/macros/utils/safe_cast.sql,sha256=1mswwkDACmIi1I99JKb_-vq3kjMe4HhMRV70mW8Bt4Y,298
  dbt/include/global_project/macros/utils/split_part.sql,sha256=fXEIS0oIiYR7-4lYbb0QbZdG-q2TpV63AFd1ky4I5UM,714
  dbt/include/global_project/tests/generic/builtin.sql,sha256=p94xdyPwb2TlxgLBqCfrcRfJ1QNgsjPvBm8f0Q5eqZM,1022
- dbt_adapters-0.1.0a5.dist-info/METADATA,sha256=AfsRBbcK286nDHgccbSVvOOh_H8K07qHiWMccr7oqe0,3130
- dbt_adapters-0.1.0a5.dist-info/WHEEL,sha256=TJPnKdtrSue7xZ_AVGkp9YXcvDrobsjBds1du3Nx6dc,87
- dbt_adapters-0.1.0a5.dist-info/licenses/LICENSE,sha256=9yjigiJhWcCZvQjdagGKDwrRph58QWc5P2bVSQwXo6s,11344
- dbt_adapters-0.1.0a5.dist-info/RECORD,,
+ dbt_adapters-1.0.0.dist-info/METADATA,sha256=TgGiyYlR0VQnAINZYDSGhocfHoVYf5ff98U7nU277f8,2482
+ dbt_adapters-1.0.0.dist-info/WHEEL,sha256=uNdcs2TADwSd5pVaP0Z_kcjcvvTUklh2S7bxZMF8Uj0,87
+ dbt_adapters-1.0.0.dist-info/licenses/LICENSE,sha256=9yjigiJhWcCZvQjdagGKDwrRph58QWc5P2bVSQwXo6s,11344
+ dbt_adapters-1.0.0.dist-info/RECORD,,
dbt_adapters-1.0.0.dist-info/WHEEL CHANGED
@@ -1,4 +1,4 @@
  Wheel-Version: 1.0
- Generator: hatchling 1.21.1
+ Generator: hatchling 1.22.4
  Root-Is-Purelib: true
  Tag: py3-none-any