dbt_adapters-0.1.0a1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



Files changed (147)
  1. dbt/__init__.py +0 -0
  2. dbt/adapters/__about__.py +1 -0
  3. dbt/adapters/__init__.py +7 -0
  4. dbt/adapters/base/README.md +13 -0
  5. dbt/adapters/base/__init__.py +15 -0
  6. dbt/adapters/base/column.py +166 -0
  7. dbt/adapters/base/connections.py +426 -0
  8. dbt/adapters/base/impl.py +1654 -0
  9. dbt/adapters/base/meta.py +131 -0
  10. dbt/adapters/base/plugin.py +32 -0
  11. dbt/adapters/base/query_headers.py +101 -0
  12. dbt/adapters/base/relation.py +471 -0
  13. dbt/adapters/cache.py +521 -0
  14. dbt/adapters/capability.py +52 -0
  15. dbt/adapters/clients/__init__.py +0 -0
  16. dbt/adapters/clients/jinja.py +24 -0
  17. dbt/adapters/contracts/__init__.py +0 -0
  18. dbt/adapters/contracts/connection.py +228 -0
  19. dbt/adapters/contracts/macros.py +11 -0
  20. dbt/adapters/contracts/relation.py +125 -0
  21. dbt/adapters/events/README.md +57 -0
  22. dbt/adapters/events/__init__.py +0 -0
  23. dbt/adapters/events/adapter_types.proto +517 -0
  24. dbt/adapters/events/adapter_types_pb2.py +208 -0
  25. dbt/adapters/events/base_types.py +40 -0
  26. dbt/adapters/events/logging.py +83 -0
  27. dbt/adapters/events/types.py +423 -0
  28. dbt/adapters/exceptions/__init__.py +40 -0
  29. dbt/adapters/exceptions/alias.py +24 -0
  30. dbt/adapters/exceptions/cache.py +68 -0
  31. dbt/adapters/exceptions/compilation.py +255 -0
  32. dbt/adapters/exceptions/connection.py +16 -0
  33. dbt/adapters/exceptions/database.py +51 -0
  34. dbt/adapters/factory.py +246 -0
  35. dbt/adapters/protocol.py +173 -0
  36. dbt/adapters/reference_keys.py +39 -0
  37. dbt/adapters/relation_configs/README.md +25 -0
  38. dbt/adapters/relation_configs/__init__.py +12 -0
  39. dbt/adapters/relation_configs/config_base.py +44 -0
  40. dbt/adapters/relation_configs/config_change.py +24 -0
  41. dbt/adapters/relation_configs/config_validation.py +57 -0
  42. dbt/adapters/sql/__init__.py +2 -0
  43. dbt/adapters/sql/connections.py +195 -0
  44. dbt/adapters/sql/impl.py +273 -0
  45. dbt/adapters/utils.py +69 -0
  46. dbt/include/global_project/__init__.py +4 -0
  47. dbt/include/global_project/dbt_project.yml +7 -0
  48. dbt/include/global_project/docs/overview.md +43 -0
  49. dbt/include/global_project/macros/adapters/apply_grants.sql +167 -0
  50. dbt/include/global_project/macros/adapters/columns.sql +137 -0
  51. dbt/include/global_project/macros/adapters/freshness.sql +16 -0
  52. dbt/include/global_project/macros/adapters/indexes.sql +41 -0
  53. dbt/include/global_project/macros/adapters/metadata.sql +96 -0
  54. dbt/include/global_project/macros/adapters/persist_docs.sql +33 -0
  55. dbt/include/global_project/macros/adapters/relation.sql +79 -0
  56. dbt/include/global_project/macros/adapters/schema.sql +20 -0
  57. dbt/include/global_project/macros/adapters/show.sql +22 -0
  58. dbt/include/global_project/macros/adapters/timestamps.sql +44 -0
  59. dbt/include/global_project/macros/adapters/validate_sql.sql +10 -0
  60. dbt/include/global_project/macros/etc/datetime.sql +62 -0
  61. dbt/include/global_project/macros/etc/statement.sql +52 -0
  62. dbt/include/global_project/macros/generic_test_sql/accepted_values.sql +27 -0
  63. dbt/include/global_project/macros/generic_test_sql/not_null.sql +9 -0
  64. dbt/include/global_project/macros/generic_test_sql/relationships.sql +23 -0
  65. dbt/include/global_project/macros/generic_test_sql/unique.sql +12 -0
  66. dbt/include/global_project/macros/get_custom_name/get_custom_alias.sql +36 -0
  67. dbt/include/global_project/macros/get_custom_name/get_custom_database.sql +32 -0
  68. dbt/include/global_project/macros/get_custom_name/get_custom_schema.sql +60 -0
  69. dbt/include/global_project/macros/materializations/configs.sql +21 -0
  70. dbt/include/global_project/macros/materializations/hooks.sql +35 -0
  71. dbt/include/global_project/macros/materializations/models/clone/can_clone_table.sql +7 -0
  72. dbt/include/global_project/macros/materializations/models/clone/clone.sql +67 -0
  73. dbt/include/global_project/macros/materializations/models/clone/create_or_replace_clone.sql +7 -0
  74. dbt/include/global_project/macros/materializations/models/incremental/column_helpers.sql +80 -0
  75. dbt/include/global_project/macros/materializations/models/incremental/incremental.sql +92 -0
  76. dbt/include/global_project/macros/materializations/models/incremental/is_incremental.sql +13 -0
  77. dbt/include/global_project/macros/materializations/models/incremental/merge.sql +131 -0
  78. dbt/include/global_project/macros/materializations/models/incremental/on_schema_change.sql +144 -0
  79. dbt/include/global_project/macros/materializations/models/incremental/strategies.sql +79 -0
  80. dbt/include/global_project/macros/materializations/models/materialized_view.sql +121 -0
  81. dbt/include/global_project/macros/materializations/models/table.sql +64 -0
  82. dbt/include/global_project/macros/materializations/models/view.sql +72 -0
  83. dbt/include/global_project/macros/materializations/seeds/helpers.sql +128 -0
  84. dbt/include/global_project/macros/materializations/seeds/seed.sql +60 -0
  85. dbt/include/global_project/macros/materializations/snapshots/helpers.sql +181 -0
  86. dbt/include/global_project/macros/materializations/snapshots/snapshot.sql +99 -0
  87. dbt/include/global_project/macros/materializations/snapshots/snapshot_merge.sql +25 -0
  88. dbt/include/global_project/macros/materializations/snapshots/strategies.sql +174 -0
  89. dbt/include/global_project/macros/materializations/tests/helpers.sql +14 -0
  90. dbt/include/global_project/macros/materializations/tests/test.sql +60 -0
  91. dbt/include/global_project/macros/materializations/tests/where_subquery.sql +15 -0
  92. dbt/include/global_project/macros/python_model/python.sql +103 -0
  93. dbt/include/global_project/macros/relations/column/columns_spec_ddl.sql +89 -0
  94. dbt/include/global_project/macros/relations/create.sql +23 -0
  95. dbt/include/global_project/macros/relations/create_backup.sql +17 -0
  96. dbt/include/global_project/macros/relations/create_intermediate.sql +17 -0
  97. dbt/include/global_project/macros/relations/drop.sql +41 -0
  98. dbt/include/global_project/macros/relations/drop_backup.sql +14 -0
  99. dbt/include/global_project/macros/relations/materialized_view/alter.sql +55 -0
  100. dbt/include/global_project/macros/relations/materialized_view/create.sql +10 -0
  101. dbt/include/global_project/macros/relations/materialized_view/drop.sql +14 -0
  102. dbt/include/global_project/macros/relations/materialized_view/refresh.sql +9 -0
  103. dbt/include/global_project/macros/relations/materialized_view/rename.sql +10 -0
  104. dbt/include/global_project/macros/relations/materialized_view/replace.sql +10 -0
  105. dbt/include/global_project/macros/relations/rename.sql +35 -0
  106. dbt/include/global_project/macros/relations/rename_intermediate.sql +14 -0
  107. dbt/include/global_project/macros/relations/replace.sql +50 -0
  108. dbt/include/global_project/macros/relations/schema.sql +8 -0
  109. dbt/include/global_project/macros/relations/table/create.sql +60 -0
  110. dbt/include/global_project/macros/relations/table/drop.sql +14 -0
  111. dbt/include/global_project/macros/relations/table/rename.sql +10 -0
  112. dbt/include/global_project/macros/relations/table/replace.sql +10 -0
  113. dbt/include/global_project/macros/relations/view/create.sql +27 -0
  114. dbt/include/global_project/macros/relations/view/drop.sql +14 -0
  115. dbt/include/global_project/macros/relations/view/rename.sql +10 -0
  116. dbt/include/global_project/macros/relations/view/replace.sql +66 -0
  117. dbt/include/global_project/macros/utils/any_value.sql +9 -0
  118. dbt/include/global_project/macros/utils/array_append.sql +8 -0
  119. dbt/include/global_project/macros/utils/array_concat.sql +7 -0
  120. dbt/include/global_project/macros/utils/array_construct.sql +12 -0
  121. dbt/include/global_project/macros/utils/bool_or.sql +9 -0
  122. dbt/include/global_project/macros/utils/cast_bool_to_text.sql +7 -0
  123. dbt/include/global_project/macros/utils/concat.sql +7 -0
  124. dbt/include/global_project/macros/utils/data_types.sql +129 -0
  125. dbt/include/global_project/macros/utils/date_spine.sql +75 -0
  126. dbt/include/global_project/macros/utils/date_trunc.sql +7 -0
  127. dbt/include/global_project/macros/utils/dateadd.sql +14 -0
  128. dbt/include/global_project/macros/utils/datediff.sql +14 -0
  129. dbt/include/global_project/macros/utils/escape_single_quotes.sql +8 -0
  130. dbt/include/global_project/macros/utils/except.sql +9 -0
  131. dbt/include/global_project/macros/utils/generate_series.sql +53 -0
  132. dbt/include/global_project/macros/utils/hash.sql +7 -0
  133. dbt/include/global_project/macros/utils/intersect.sql +9 -0
  134. dbt/include/global_project/macros/utils/last_day.sql +15 -0
  135. dbt/include/global_project/macros/utils/length.sql +11 -0
  136. dbt/include/global_project/macros/utils/listagg.sql +30 -0
  137. dbt/include/global_project/macros/utils/literal.sql +7 -0
  138. dbt/include/global_project/macros/utils/position.sql +11 -0
  139. dbt/include/global_project/macros/utils/replace.sql +14 -0
  140. dbt/include/global_project/macros/utils/right.sql +12 -0
  141. dbt/include/global_project/macros/utils/safe_cast.sql +9 -0
  142. dbt/include/global_project/macros/utils/split_part.sql +26 -0
  143. dbt/include/global_project/tests/generic/builtin.sql +30 -0
  144. dbt_adapters-0.1.0a1.dist-info/METADATA +81 -0
  145. dbt_adapters-0.1.0a1.dist-info/RECORD +147 -0
  146. dbt_adapters-0.1.0a1.dist-info/WHEEL +4 -0
  147. dbt_adapters-0.1.0a1.dist-info/licenses/LICENSE +201 -0
dbt/adapters/base/impl.py
@@ -0,0 +1,1654 @@
+ import abc
+ from concurrent.futures import as_completed, Future
+ from contextlib import contextmanager
+ from datetime import datetime
+ from enum import Enum
+ from multiprocessing.context import SpawnContext
+ import time
+ from typing import (
+     Any,
+     Callable,
+     Dict,
+     FrozenSet,
+     Iterable,
+     Iterator,
+     List,
+     Mapping,
+     Optional,
+     Set,
+     Tuple,
+     Type,
+     TypedDict,
+     Union,
+ )
+
+ import agate
+ from dbt_common.clients.agate_helper import (
+     Integer,
+     empty_table,
+     get_column_value_uncased,
+     merge_tables,
+     table_from_rows,
+ )
+ from dbt_common.clients.jinja import CallableMacroGenerator
+ from dbt_common.contracts.constraints import (
+     ColumnLevelConstraint,
+     ConstraintType,
+     ModelLevelConstraint,
+ )
+ from dbt_common.exceptions import (
+     DbtInternalError,
+     DbtRuntimeError,
+     DbtValidationError,
+     MacroArgTypeError,
+     MacroResultError,
+     NotImplementedError,
+     UnexpectedNullError,
+ )
+ from dbt_common.events.functions import fire_event, warn_or_error
+ from dbt_common.utils import (
+     AttrDict,
+     cast_to_str,
+     executor,
+     filter_null_values,
+ )
+ import pytz
+
+ from dbt.adapters.base.column import Column as BaseColumn
+ from dbt.adapters.base.connections import (
+     AdapterResponse,
+     BaseConnectionManager,
+     Connection,
+ )
+ from dbt.adapters.base.meta import AdapterMeta, available
+ from dbt.adapters.base.relation import (
+     BaseRelation,
+     ComponentName,
+     InformationSchema,
+     SchemaSearchMap,
+ )
+ from dbt.adapters.cache import RelationsCache, _make_ref_key_dict
+ from dbt.adapters.capability import Capability, CapabilityDict
+ from dbt.adapters.contracts.connection import Credentials
+ from dbt.adapters.contracts.macros import MacroResolverProtocol
+ from dbt.adapters.contracts.relation import RelationConfig
+ from dbt.adapters.events.types import (
+     CacheMiss,
+     CatalogGenerationError,
+     CodeExecution,
+     CodeExecutionStatus,
+     CollectFreshnessReturnSignature,
+     ConstraintNotEnforced,
+     ConstraintNotSupported,
+     ListRelations,
+ )
+ from dbt.adapters.exceptions import (
+     NullRelationCacheAttemptedError,
+     NullRelationDropAttemptedError,
+     QuoteConfigTypeError,
+     RelationReturnedMultipleResultsError,
+     RenameToNoneAttemptedError,
+     SnapshotTargetIncompleteError,
+     SnapshotTargetNotSnapshotTableError,
+     UnexpectedNonTimestampError,
+ )
+ from dbt.adapters.protocol import AdapterConfig, MacroContextGeneratorCallable
+
+
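+ # Names of adapter macros that BaseAdapter resolves and runs via execute_macro().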
+ GET_CATALOG_MACRO_NAME = "get_catalog"
+ GET_CATALOG_RELATIONS_MACRO_NAME = "get_catalog_relations"
+ FRESHNESS_MACRO_NAME = "collect_freshness"
+ GET_RELATION_LAST_MODIFIED_MACRO_NAME = "get_relation_last_modified"
+
+
+ class ConstraintSupport(str, Enum):
+     ENFORCED = "enforced"
+     NOT_ENFORCED = "not_enforced"
+     NOT_SUPPORTED = "not_supported"
+
+
+ def _expect_row_value(key: str, row: agate.Row):
+     if key not in row.keys():
+         raise DbtInternalError(
+             'Got a row without "{}" column, columns: {}'.format(key, row.keys())
+         )
+     return row[key]
+
+
+ def _catalog_filter_schemas(
+     used_schemas: FrozenSet[Tuple[str, str]]
+ ) -> Callable[[agate.Row], bool]:
+     """Return a function that takes a row and decides if the row should be
+     included in the catalog output.
+     """
+     schemas = frozenset((d.lower(), s.lower()) for d, s in used_schemas)
+
+     def test(row: agate.Row) -> bool:
+         table_database = _expect_row_value("table_database", row)
+         table_schema = _expect_row_value("table_schema", row)
+         # the schema may be present but None, which is not an error and should
+         # be filtered out
+         if table_schema is None:
+             return False
+         return (table_database.lower(), table_schema.lower()) in schemas
+
+     return test
+
+
+ def _utc(dt: Optional[datetime], source: Optional[BaseRelation], field_name: str) -> datetime:
+     """If dt has a timezone, return a new datetime that's in UTC. Otherwise,
+     assume the datetime is already for UTC and add the timezone.
+     """
+     if dt is None:
+         raise UnexpectedNullError(field_name, source)
+
+     elif not hasattr(dt, "tzinfo"):
+         raise UnexpectedNonTimestampError(field_name, source, dt)
+
+     elif dt.tzinfo:
+         return dt.astimezone(pytz.UTC)
+     else:
+         return dt.replace(tzinfo=pytz.UTC)
+
+
+ def _relation_name(rel: Optional[BaseRelation]) -> str:
+     if rel is None:
+         return "null relation"
+     else:
+         return str(rel)
+
+
+ def log_code_execution(code_execution_function):
+     # decorator to log code and execution time
+     if code_execution_function.__name__ != "submit_python_job":
+         raise ValueError("this should be only used to log submit_python_job now")
+
+     def execution_with_log(*args):
+         self = args[0]
+         connection_name = self.connections.get_thread_connection().name
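+         # the wrapped function is submit_python_job(self, parsed_model, compiled_code),
+         # so args[2] here is the compiled code being submitted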
+         fire_event(CodeExecution(conn_name=connection_name, code_content=args[2]))
+         start_time = time.time()
+         response = code_execution_function(*args)
+         fire_event(
+             CodeExecutionStatus(
+                 status=response._message, elapsed=round((time.time() - start_time), 2)
+             )
+         )
+         return response
+
+     return execution_with_log
+
+
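+ # Stub interface for running python models; platform adapters are expected to
+ # subclass this and implement __init__ and submit for their runtime.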
+ class PythonJobHelper:
+     def __init__(self, parsed_model: Dict, credential: Credentials) -> None:
+         raise NotImplementedError("PythonJobHelper is not implemented yet")
+
+     def submit(self, compiled_code: str) -> Any:
+         raise NotImplementedError("PythonJobHelper submit function is not implemented yet")
+
+
+ class FreshnessResponse(TypedDict):
+     max_loaded_at: datetime
+     snapshotted_at: datetime
+     age: float  # age in seconds
+
+
+ class BaseAdapter(metaclass=AdapterMeta):
+     """The BaseAdapter provides an abstract base class for adapters.
+
+     Adapters must implement the following methods and macros. Some of the
+     methods can be safely overridden as a noop, where it makes sense
+     (transactions on databases that don't support them, for instance). Those
+     methods are marked with a (passable) in their docstrings. Check docstrings
+     for type information, etc.
+
+     To implement a macro, implement "${adapter_type}__${macro_name}" in the
+     adapter's internal project.
+
+     To invoke a method in an adapter macro, call it on the 'adapter' Jinja
+     object using dot syntax.
+
+     To invoke a method in model code, add the @available decorator atop a method
+     declaration. Methods are invoked as macros.
+
+     Methods:
+         - exception_handler
+         - date_function
+         - list_schemas
+         - drop_relation
+         - truncate_relation
+         - rename_relation
+         - get_columns_in_relation
+         - get_column_schema_from_query
+         - expand_column_types
+         - list_relations_without_caching
+         - is_cancelable
+         - create_schema
+         - drop_schema
+         - quote
+         - convert_text_type
+         - convert_number_type
+         - convert_boolean_type
+         - convert_datetime_type
+         - convert_date_type
+         - convert_time_type
+         - standardize_grants_dict
+
+     Macros:
+         - get_catalog
+     """
+
+     Relation: Type[BaseRelation] = BaseRelation
+     Column: Type[BaseColumn] = BaseColumn
+     ConnectionManager: Type[BaseConnectionManager]
+
+     # A set of clobber config fields accepted by this adapter
+     # for use in materializations
+     AdapterSpecificConfigs: Type[AdapterConfig] = AdapterConfig
+
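+     # Default constraint-support levels; concrete adapters typically override
+     # this mapping to reflect what their database actually enforces.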
+     CONSTRAINT_SUPPORT = {
+         ConstraintType.check: ConstraintSupport.NOT_SUPPORTED,
+         ConstraintType.not_null: ConstraintSupport.ENFORCED,
+         ConstraintType.unique: ConstraintSupport.NOT_ENFORCED,
+         ConstraintType.primary_key: ConstraintSupport.NOT_ENFORCED,
+         ConstraintType.foreign_key: ConstraintSupport.ENFORCED,
+     }
+
+     # This static member variable can be overridden in concrete adapter
+     # implementations to indicate adapter support for optional capabilities.
+     _capabilities = CapabilityDict({})
+
+     def __init__(self, config, mp_context: SpawnContext) -> None:
+         self.config = config
+         self.cache = RelationsCache(log_cache_events=config.log_cache_events)
+         self.connections = self.ConnectionManager(config, mp_context)
+         self._macro_resolver: Optional[MacroResolverProtocol] = None
+         self._macro_context_generator: Optional[MacroContextGeneratorCallable] = None
+
+     ###
+     # Methods to set / access a macro resolver
+     ###
+     def set_macro_resolver(self, macro_resolver: MacroResolverProtocol) -> None:
+         self._macro_resolver = macro_resolver
+
+     def get_macro_resolver(self) -> Optional[MacroResolverProtocol]:
+         return self._macro_resolver
+
+     def clear_macro_resolver(self) -> None:
+         if self._macro_resolver is not None:
+             self._macro_resolver = None
+
+     def set_macro_context_generator(
+         self,
+         macro_context_generator: MacroContextGeneratorCallable,
+     ) -> None:
+         self._macro_context_generator = macro_context_generator
+
+     ###
+     # Methods that pass through to the connection manager
+     ###
+     def acquire_connection(self, name=None) -> Connection:
+         return self.connections.set_connection_name(name)
+
+     def release_connection(self) -> None:
+         self.connections.release()
+
+     def cleanup_connections(self) -> None:
+         self.connections.cleanup_all()
+
+     def clear_transaction(self) -> None:
+         self.connections.clear_transaction()
+
+     def commit_if_has_connection(self) -> None:
+         self.connections.commit_if_has_connection()
+
+     def debug_query(self) -> None:
+         self.execute("select 1 as id")
+
+     def nice_connection_name(self) -> str:
+         conn = self.connections.get_if_exists()
+         if conn is None or conn.name is None:
+             return "<None>"
+         return conn.name
+
+     @contextmanager
+     def connection_named(self, name: str, query_header_context: Any = None) -> Iterator[None]:
+         try:
+             if self.connections.query_header is not None:
+                 self.connections.query_header.set(name, query_header_context)
+             self.acquire_connection(name)
+             yield
+         finally:
+             self.release_connection()
+             if self.connections.query_header is not None:
+                 self.connections.query_header.reset()
+
+     @available.parse(lambda *a, **k: ("", empty_table()))
+     def execute(
+         self,
+         sql: str,
+         auto_begin: bool = False,
+         fetch: bool = False,
+         limit: Optional[int] = None,
+     ) -> Tuple[AdapterResponse, agate.Table]:
+         """Execute the given SQL. This is a thin wrapper around
+         ConnectionManager.execute.
+
+         :param str sql: The sql to execute.
+         :param bool auto_begin: If set, and dbt is not currently inside a
+             transaction, automatically begin one.
+         :param bool fetch: If set, fetch results.
+         :param Optional[int] limit: If set, fetch at most this many rows.
+         :return: A tuple of the query status and results (empty if fetch=False).
+         :rtype: Tuple[AdapterResponse, agate.Table]
+         """
+         return self.connections.execute(sql=sql, auto_begin=auto_begin, fetch=fetch, limit=limit)
+
+     def validate_sql(self, sql: str) -> AdapterResponse:
+         """Submit the given SQL to the engine for validation, but not execution.
+
+         This should throw an appropriate exception if the input SQL is invalid, although
+         in practice that will generally be handled by delegating to an existing method
+         for execution and allowing the error handler to take care of the rest.
+
+         :param str sql: The sql to validate
+         """
+         raise NotImplementedError("`validate_sql` is not implemented for this adapter!")
+
+     @available.parse(lambda *a, **k: [])
+     def get_column_schema_from_query(self, sql: str) -> List[BaseColumn]:
+         """Get a list of the Columns with names and data types from the given sql."""
+         _, cursor = self.connections.add_select_query(sql)
+         columns = [
+             self.Column.create(
+                 column_name, self.connections.data_type_code_to_name(column_type_code)
+             )
+             # https://peps.python.org/pep-0249/#description
+             for column_name, column_type_code, *_ in cursor.description
+         ]
+         return columns
+
+     @available.parse(lambda *a, **k: ("", empty_table()))
+     def get_partitions_metadata(self, table: str) -> Tuple[agate.Table]:
+         """
+         TODO: Can we move this to dbt-bigquery?
+         Obtain partitions metadata for a BigQuery partitioned table.
+
+         :param str table: a partitioned table id, in standard SQL format.
+         :return: a partition metadata tuple, as described in
+             https://cloud.google.com/bigquery/docs/creating-partitioned-tables#getting_partition_metadata_using_meta_tables.
+         :rtype: agate.Table
+         """
+         if hasattr(self.connections, "get_partitions_metadata"):
+             return self.connections.get_partitions_metadata(table=table)
+         else:
+             raise NotImplementedError(
+                 "`get_partitions_metadata` is not implemented for this adapter!"
+             )
+
+     ###
+     # Methods that should never be overridden
+     ###
+     @classmethod
+     def type(cls) -> str:
+         """Get the type of this adapter. Types must be class-unique and
+         consistent.
+
+         :return: The type name
+         :rtype: str
+         """
+         return cls.ConnectionManager.TYPE
+
+     # Caching methods
+     ###
+     def _schema_is_cached(self, database: Optional[str], schema: str) -> bool:
+         """Check if the schema is cached, and by default logs if it is not."""
+
+         if (database, schema) not in self.cache:
+             fire_event(
+                 CacheMiss(
+                     conn_name=self.nice_connection_name(),
+                     database=cast_to_str(database),
+                     schema=schema,
+                 )
+             )
+             return False
+         else:
+             return True
+
+     def _get_cache_schemas(self, relation_configs: Iterable[RelationConfig]) -> Set[BaseRelation]:
+         """Get the set of schema relations that the cache logic needs to
+         populate.
+         """
+         return {
+             self.Relation.create_from(quoting=self.config, relation_config=relation_config)
+             for relation_config in relation_configs
+         }
+
+     def _get_catalog_schemas(self, relation_configs: Iterable[RelationConfig]) -> SchemaSearchMap:
+         """Get a mapping of each node's "information_schema" relations to a
+         set of all schemas expected in that information_schema.
+
+         There may be keys that are technically duplicates on the database side,
+         for example all of '"foo", 'foo', '"FOO"' and 'FOO' could coexist as
+         databases, and values could overlap as appropriate. All values are
+         lowercase strings.
+         """
+         info_schema_name_map = SchemaSearchMap()
+         relations = self._get_catalog_relations(relation_configs)
+         for relation in relations:
+             info_schema_name_map.add(relation)
+         # result is a map whose keys are information_schema Relations without
+         # identifiers that have appropriate database prefixes, and whose values
+         # are sets of lowercase schema names that are valid members of those
+         # databases
+         return info_schema_name_map
+
+     def _get_catalog_relations_by_info_schema(
+         self, relations
+     ) -> Dict[InformationSchema, List[BaseRelation]]:
+         relations_by_info_schema: Dict[InformationSchema, List[BaseRelation]] = dict()
+         for relation in relations:
+             info_schema = relation.information_schema_only()
+             if info_schema not in relations_by_info_schema:
+                 relations_by_info_schema[info_schema] = []
+             relations_by_info_schema[info_schema].append(relation)
+
+         return relations_by_info_schema
+
+     def _get_catalog_relations(
+         self, relation_configs: Iterable[RelationConfig]
+     ) -> List[BaseRelation]:
+         relations = [
+             self.Relation.create_from(quoting=self.config, relation_config=relation_config)
+             for relation_config in relation_configs
+         ]
+         return relations
+
+     def _relations_cache_for_schemas(
+         self,
+         relation_configs: Iterable[RelationConfig],
+         cache_schemas: Optional[Set[BaseRelation]] = None,
+     ) -> None:
+         """Populate the relations cache for the given schemas."""
+         if not cache_schemas:
+             cache_schemas = self._get_cache_schemas(relation_configs)
+         with executor(self.config) as tpe:
+             futures: List[Future[List[BaseRelation]]] = []
+             for cache_schema in cache_schemas:
+                 fut = tpe.submit_connected(
+                     self,
+                     f"list_{cache_schema.database}_{cache_schema.schema}",
+                     self.list_relations_without_caching,
+                     cache_schema,
+                 )
+                 futures.append(fut)
+
+             for future in as_completed(futures):
+                 # if we can't read the relations we need to just raise anyway,
+                 # so just call future.result() and let that raise on failure
+                 for relation in future.result():
+                     self.cache.add(relation)
+
+         # it's possible that there were no relations in some schemas. We want
+         # to insert the schemas we query into the cache's `.schemas` attribute
+         # so we can check it later
+         cache_update: Set[Tuple[Optional[str], str]] = set()
+         for relation in cache_schemas:
+             if relation.schema:
+                 cache_update.add((relation.database, relation.schema))
+         self.cache.update_schemas(cache_update)
+
+     def set_relations_cache(
+         self,
+         relation_configs: Iterable[RelationConfig],
+         clear: bool = False,
+         required_schemas: Optional[Set[BaseRelation]] = None,
+     ) -> None:
+         """Run a query that gets a populated cache of the relations in the
+         database and set the cache on this adapter.
+         """
+         with self.cache.lock:
+             if clear:
+                 self.cache.clear()
+             self._relations_cache_for_schemas(relation_configs, required_schemas)
+
+     @available
+     def cache_added(self, relation: Optional[BaseRelation]) -> str:
+         """Cache a new relation in dbt. It will show up in `list relations`."""
+         if relation is None:
+             name = self.nice_connection_name()
+             raise NullRelationCacheAttemptedError(name)
+         self.cache.add(relation)
+         # so jinja doesn't render things
+         return ""
+
+     @available
+     def cache_dropped(self, relation: Optional[BaseRelation]) -> str:
+         """Drop a relation in dbt. It will no longer show up in
+         `list relations`, and any bound views will be dropped from the cache
+         """
+         if relation is None:
+             name = self.nice_connection_name()
+             raise NullRelationDropAttemptedError(name)
+         self.cache.drop(relation)
+         return ""
+
+     @available
+     def cache_renamed(
+         self,
+         from_relation: Optional[BaseRelation],
+         to_relation: Optional[BaseRelation],
+     ) -> str:
+         """Rename a relation in dbt. It will show up with a new name in
+         `list_relations`, but bound views will remain bound.
+         """
+         if from_relation is None or to_relation is None:
+             name = self.nice_connection_name()
+             src_name = _relation_name(from_relation)
+             dst_name = _relation_name(to_relation)
+             raise RenameToNoneAttemptedError(src_name, dst_name, name)
+
+         self.cache.rename(from_relation, to_relation)
+         return ""
+
+     ###
+     # Abstract methods for database-specific values, attributes, and types
+     ###
+     @classmethod
+     @abc.abstractmethod
+     def date_function(cls) -> str:
+         """Get the date function used by this adapter's database."""
+         raise NotImplementedError("`date_function` is not implemented for this adapter!")
+
+     @classmethod
+     @abc.abstractmethod
+     def is_cancelable(cls) -> bool:
+         raise NotImplementedError("`is_cancelable` is not implemented for this adapter!")
+
+     ###
+     # Abstract methods about schemas
+     ###
+     @abc.abstractmethod
+     def list_schemas(self, database: str) -> List[str]:
+         """Get a list of existing schemas in database"""
+         raise NotImplementedError("`list_schemas` is not implemented for this adapter!")
+
+     @available.parse(lambda *a, **k: False)
+     def check_schema_exists(self, database: str, schema: str) -> bool:
+         """Check if a schema exists.
+
+         The default implementation of this is potentially unnecessarily slow,
+         and adapters should implement it if there is an optimized path (and
+         there probably is)
+         """
+         search = (s.lower() for s in self.list_schemas(database=database))
+         return schema.lower() in search
+
+     ###
+     # Abstract methods about relations
+     ###
+     @abc.abstractmethod
+     @available.parse_none
+     def drop_relation(self, relation: BaseRelation) -> None:
+         """Drop the given relation.
+
+         *Implementors must call self.cache.drop() to preserve cache state!*
+         """
+         raise NotImplementedError("`drop_relation` is not implemented for this adapter!")
+
+     @abc.abstractmethod
+     @available.parse_none
+     def truncate_relation(self, relation: BaseRelation) -> None:
+         """Truncate the given relation."""
+         raise NotImplementedError("`truncate_relation` is not implemented for this adapter!")
+
+     @abc.abstractmethod
+     @available.parse_none
+     def rename_relation(self, from_relation: BaseRelation, to_relation: BaseRelation) -> None:
+         """Rename the relation from from_relation to to_relation.
+
+         Implementors must call self.cache.rename() to preserve cache state.
+         """
+         raise NotImplementedError("`rename_relation` is not implemented for this adapter!")
+
+     @abc.abstractmethod
+     @available.parse_list
+     def get_columns_in_relation(self, relation: BaseRelation) -> List[BaseColumn]:
+         """Get a list of the columns in the given Relation."""
+         raise NotImplementedError("`get_columns_in_relation` is not implemented for this adapter!")
+
+     @available.deprecated("get_columns_in_relation", lambda *a, **k: [])
+     def get_columns_in_table(self, schema: str, identifier: str) -> List[BaseColumn]:
+         """DEPRECATED: Get a list of the columns in the given table."""
+         relation = self.Relation.create(
+             database=self.config.credentials.database,
+             schema=schema,
+             identifier=identifier,
+             quote_policy=self.config.quoting,
+         )
+         return self.get_columns_in_relation(relation)
+
+     @abc.abstractmethod
+     def expand_column_types(self, goal: BaseRelation, current: BaseRelation) -> None:
+         """Expand the current table's types to match the goal table. (passable)
+
+         :param self.Relation goal: A relation that currently exists in the
+             database with columns of the desired types.
+         :param self.Relation current: A relation that currently exists in the
+             database with columns of unspecified types.
+         """
+         raise NotImplementedError(
+             "`expand_target_column_types` is not implemented for this adapter!"
+         )
+
+     @abc.abstractmethod
+     def list_relations_without_caching(self, schema_relation: BaseRelation) -> List[BaseRelation]:
+         """List relations in the given schema, bypassing the cache.
+
+         This is used as the underlying behavior to fill the cache.
+
+         :param schema_relation: A relation containing the database and schema
+             as appropriate for the underlying data warehouse
+         :return: The relations in schema
+         :rtype: List[self.Relation]
+         """
+         raise NotImplementedError(
+             "`list_relations_without_caching` is not implemented for this adapter!"
+         )
+
+     ###
+     # Methods about grants
+     ###
+     @available
+     def standardize_grants_dict(self, grants_table: agate.Table) -> dict:
+         """Translate the result of `show grants` (or equivalent) to match the
+         grants which a user would configure in their project.
+
+         Ideally, the SQL to show grants should also be filtering:
+         filter OUT any grants TO the current user/role (e.g. OWNERSHIP).
+         If that's not possible in SQL, it can be done in this method instead.
+
+         :param grants_table: An agate table containing the query result of
+             the SQL returned by get_show_grant_sql
+         :return: A standardized dictionary matching the `grants` config
+         :rtype: dict
+         """
+         grants_dict: Dict[str, List[str]] = {}
+         for row in grants_table:
+             grantee = row["grantee"]
+             privilege = row["privilege_type"]
+             if privilege in grants_dict.keys():
+                 grants_dict[privilege].append(grantee)
+             else:
+                 grants_dict.update({privilege: [grantee]})
+         return grants_dict
+
+     ###
+     # Provided methods about relations
+     ###
+     @available.parse_list
+     def get_missing_columns(
+         self, from_relation: BaseRelation, to_relation: BaseRelation
+     ) -> List[BaseColumn]:
+         """Returns a list of Columns in from_relation that are missing from
+         to_relation.
+         """
+         if not isinstance(from_relation, self.Relation):
+             raise MacroArgTypeError(
+                 method_name="get_missing_columns",
+                 arg_name="from_relation",
+                 got_value=from_relation,
+                 expected_type=self.Relation,
+             )
+
+         if not isinstance(to_relation, self.Relation):
+             raise MacroArgTypeError(
+                 method_name="get_missing_columns",
+                 arg_name="to_relation",
+                 got_value=to_relation,
+                 expected_type=self.Relation,
+             )
+
+         from_columns = {col.name: col for col in self.get_columns_in_relation(from_relation)}
+
+         to_columns = {col.name: col for col in self.get_columns_in_relation(to_relation)}
+
+         missing_columns = set(from_columns.keys()) - set(to_columns.keys())
+
+         return [col for (col_name, col) in from_columns.items() if col_name in missing_columns]
+
+     @available.parse_none
+     def valid_snapshot_target(self, relation: BaseRelation) -> None:
+         """Ensure that the target relation is valid, by making sure it has the
+         expected columns.
+
+         :param Relation relation: The relation to check
+         :raises MacroArgTypeError: If the columns are
+             incorrect.
+         """
+         if not isinstance(relation, self.Relation):
+             raise MacroArgTypeError(
+                 method_name="valid_snapshot_target",
+                 arg_name="relation",
+                 got_value=relation,
+                 expected_type=self.Relation,
+             )
+
+         columns = self.get_columns_in_relation(relation)
+         names = set(c.name.lower() for c in columns)
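+         # snapshot tables need dbt_scd_id / dbt_valid_from / dbt_valid_to;
+         # finding only the bare legacy names instead suggests an old-style table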
+         expanded_keys = ("scd_id", "valid_from", "valid_to")
+         extra = []
+         missing = []
+         for legacy in expanded_keys:
+             desired = "dbt_" + legacy
+             if desired not in names:
+                 missing.append(desired)
+                 if legacy in names:
+                     extra.append(legacy)
+
+         if missing:
+             if extra:
+                 raise SnapshotTargetIncompleteError(extra, missing)
+             else:
+                 raise SnapshotTargetNotSnapshotTableError(missing)
+
+     @available.parse_none
+     def expand_target_column_types(
+         self, from_relation: BaseRelation, to_relation: BaseRelation
+     ) -> None:
+         if not isinstance(from_relation, self.Relation):
+             raise MacroArgTypeError(
+                 method_name="expand_target_column_types",
+                 arg_name="from_relation",
+                 got_value=from_relation,
+                 expected_type=self.Relation,
+             )
+
+         if not isinstance(to_relation, self.Relation):
+             raise MacroArgTypeError(
+                 method_name="expand_target_column_types",
+                 arg_name="to_relation",
+                 got_value=to_relation,
+                 expected_type=self.Relation,
+             )
+
+         self.expand_column_types(from_relation, to_relation)
+
+     def list_relations(self, database: Optional[str], schema: str) -> List[BaseRelation]:
+         if self._schema_is_cached(database, schema):
+             return self.cache.get_relations(database, schema)
+
+         schema_relation = self.Relation.create(
+             database=database,
+             schema=schema,
+             identifier="",
+             quote_policy=self.config.quoting,
+         ).without_identifier()
+
+         # we can't build the relations cache because we don't have a
+         # manifest so we can't run any operations.
+         relations = self.list_relations_without_caching(schema_relation)
+
+         # if the cache is already populated, add this schema in
+         # otherwise, skip updating the cache and just ignore
+         if self.cache:
+             for relation in relations:
+                 self.cache.add(relation)
+             if not relations:
+                 # it's possible that there were no relations in some schemas. We want
+                 # to insert the schemas we query into the cache's `.schemas` attribute
+                 # so we can check it later
+                 self.cache.update_schemas([(database, schema)])
+
+         fire_event(
+             ListRelations(
+                 database=cast_to_str(database),
+                 schema=schema,
+                 relations=[_make_ref_key_dict(x) for x in relations],
+             )
+         )
+
+         return relations
+
+     def _make_match_kwargs(self, database: str, schema: str, identifier: str) -> Dict[str, str]:
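+         # unquoted components are typically case-insensitive to the database,
+         # so fold them to lowercase before matching against listed relations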
+         quoting = self.config.quoting
+         if identifier is not None and quoting["identifier"] is False:
+             identifier = identifier.lower()
+
+         if schema is not None and quoting["schema"] is False:
+             schema = schema.lower()
+
+         if database is not None and quoting["database"] is False:
+             database = database.lower()
+
+         return filter_null_values(
+             {
+                 "database": database,
+                 "identifier": identifier,
+                 "schema": schema,
+             }
+         )
+
+     def _make_match(
+         self,
+         relations_list: List[BaseRelation],
+         database: str,
+         schema: str,
+         identifier: str,
+     ) -> List[BaseRelation]:
+         matches = []
+
+         search = self._make_match_kwargs(database, schema, identifier)
+
+         for relation in relations_list:
+             if relation.matches(**search):
+                 matches.append(relation)
+
+         return matches
+
+     @available.parse_none
+     def get_relation(self, database: str, schema: str, identifier: str) -> Optional[BaseRelation]:
+         relations_list = self.list_relations(database, schema)
+
+         matches = self._make_match(relations_list, database, schema, identifier)
+
+         if len(matches) > 1:
+             kwargs = {
+                 "identifier": identifier,
+                 "schema": schema,
+                 "database": database,
+             }
+             raise RelationReturnedMultipleResultsError(kwargs, matches)
+
+         elif matches:
+             return matches[0]
+
+         return None
+
+     @available.deprecated("get_relation", lambda *a, **k: False)
+     def already_exists(self, schema: str, name: str) -> bool:
+         """DEPRECATED: Return if a model already exists in the database"""
+         database = self.config.credentials.database
+         relation = self.get_relation(database, schema, name)
+         return relation is not None
+
+     ###
+     # ODBC FUNCTIONS -- these should not need to change for every adapter,
+     # although some adapters may override them
+     ###
+     @abc.abstractmethod
+     @available.parse_none
+     def create_schema(self, relation: BaseRelation):
+         """Create the given schema if it does not exist."""
+         raise NotImplementedError("`create_schema` is not implemented for this adapter!")
+
+     @abc.abstractmethod
+     @available.parse_none
+     def drop_schema(self, relation: BaseRelation):
+         """Drop the given schema (and everything in it) if it exists."""
+         raise NotImplementedError("`drop_schema` is not implemented for this adapter!")
+
+     @available
+     @classmethod
+     @abc.abstractmethod
+     def quote(cls, identifier: str) -> str:
+         """Quote the given identifier, as appropriate for the database."""
+         raise NotImplementedError("`quote` is not implemented for this adapter!")
+
+     @available
+     def quote_as_configured(self, identifier: str, quote_key: str) -> str:
+         """Quote or do not quote the given identifier as configured in the
+         project config for the quote key.
+
+         The quote key should be one of 'database' (on bigquery, 'profile'),
+         'identifier', or 'schema', or it will be treated as if you set `True`.
+         """
+         try:
+             key = ComponentName(quote_key)
+         except ValueError:
+             return identifier
+
+         default = self.Relation.get_default_quote_policy().get_part(key)
+         if self.config.quoting.get(key, default):
+             return self.quote(identifier)
+         else:
+             return identifier
+
+     @available
+     def quote_seed_column(self, column: str, quote_config: Optional[bool]) -> str:
+         quote_columns: bool = True
+         if isinstance(quote_config, bool):
+             quote_columns = quote_config
+         elif quote_config is None:
+             pass
+         else:
+             raise QuoteConfigTypeError(quote_config)
+
+         if quote_columns:
+             return self.quote(column)
+         else:
+             return column
+
+     ###
+     # Conversions: These must be implemented by concrete implementations, for
+     # converting agate types into their sql equivalents.
+     ###
+     @classmethod
+     @abc.abstractmethod
+     def convert_text_type(cls, agate_table: agate.Table, col_idx: int) -> str:
+         """Return the type in the database that best maps to the agate.Text
+         type for the given agate table and column index.
+
+         :param agate_table: The table
+         :param col_idx: The index into the agate table for the column.
+         :return: The name of the type in the database
+         """
+         raise NotImplementedError("`convert_text_type` is not implemented for this adapter!")
+
+     @classmethod
+     @abc.abstractmethod
+     def convert_number_type(cls, agate_table: agate.Table, col_idx: int) -> str:
+         """Return the type in the database that best maps to the agate.Number
+         type for the given agate table and column index.
+
+         :param agate_table: The table
+         :param col_idx: The index into the agate table for the column.
+         :return: The name of the type in the database
+         """
+         raise NotImplementedError("`convert_number_type` is not implemented for this adapter!")
+
+     @classmethod
+     def convert_integer_type(cls, agate_table: agate.Table, col_idx: int) -> str:
+         """Return the type in the database that best maps to the agate.Number
+         type for the given agate table and column index.
+
+         :param agate_table: The table
+         :param col_idx: The index into the agate table for the column.
+         :return: The name of the type in the database
+         """
+         return "integer"
+
+     @classmethod
+     @abc.abstractmethod
+     def convert_boolean_type(cls, agate_table: agate.Table, col_idx: int) -> str:
+         """Return the type in the database that best maps to the agate.Boolean
+         type for the given agate table and column index.
+
+         :param agate_table: The table
+         :param col_idx: The index into the agate table for the column.
+         :return: The name of the type in the database
+         """
+         raise NotImplementedError("`convert_boolean_type` is not implemented for this adapter!")
+
+     @classmethod
+     @abc.abstractmethod
+     def convert_datetime_type(cls, agate_table: agate.Table, col_idx: int) -> str:
+         """Return the type in the database that best maps to the agate.DateTime
+         type for the given agate table and column index.
+
+         :param agate_table: The table
+         :param col_idx: The index into the agate table for the column.
+         :return: The name of the type in the database
+         """
+         raise NotImplementedError("`convert_datetime_type` is not implemented for this adapter!")
+
+     @classmethod
+     @abc.abstractmethod
+     def convert_date_type(cls, agate_table: agate.Table, col_idx: int) -> str:
+         """Return the type in the database that best maps to the agate.Date
+         type for the given agate table and column index.
+
+         :param agate_table: The table
+         :param col_idx: The index into the agate table for the column.
+         :return: The name of the type in the database
+         """
+         raise NotImplementedError("`convert_date_type` is not implemented for this adapter!")
+
+     @classmethod
+     @abc.abstractmethod
+     def convert_time_type(cls, agate_table: agate.Table, col_idx: int) -> str:
+         """Return the type in the database that best maps to the
+         agate.TimeDelta type for the given agate table and column index.
+
+         :param agate_table: The table
+         :param col_idx: The index into the agate table for the column.
+         :return: The name of the type in the database
+         """
+         raise NotImplementedError("`convert_time_type` is not implemented for this adapter!")
+
+     @available
+     @classmethod
+     def convert_type(cls, agate_table: agate.Table, col_idx: int) -> Optional[str]:
+         return cls.convert_agate_type(agate_table, col_idx)
+
+     @classmethod
+     def convert_agate_type(cls, agate_table: agate.Table, col_idx: int) -> Optional[str]:
+         agate_type: Type = agate_table.column_types[col_idx]
+         conversions: List[Tuple[Type, Callable[..., str]]] = [
+             (Integer, cls.convert_integer_type),
+             (agate.Text, cls.convert_text_type),
+             (agate.Number, cls.convert_number_type),
+             (agate.Boolean, cls.convert_boolean_type),
+             (agate.DateTime, cls.convert_datetime_type),
+             (agate.Date, cls.convert_date_type),
+             (agate.TimeDelta, cls.convert_time_type),
+         ]
+         for agate_cls, func in conversions:
+             if isinstance(agate_type, agate_cls):
+                 return func(agate_table, col_idx)
+
+         return None
+
+     ###
+     # Operations involving the manifest
+     ###
+     def execute_macro(
+         self,
+         macro_name: str,
+         macro_resolver: Optional[MacroResolverProtocol] = None,
+         project: Optional[str] = None,
+         context_override: Optional[Dict[str, Any]] = None,
+         kwargs: Optional[Dict[str, Any]] = None,
+     ) -> AttrDict:
+         """Look macro_name up in the manifest and execute its results.
+
+         :param macro_name: The name of the macro to execute.
+         :param macro_resolver: The macro resolver to use for finding the
+             macro. If none is provided, use the resolver set on the adapter.
+         :param project: The name of the project to search in, or None for the
+             first match.
+         :param context_override: An optional dict to update() the macro
+             execution context.
+         :param kwargs: An optional dict of keyword args used to pass to the
+             macro.
+         """
+
+         if kwargs is None:
+             kwargs = {}
+         if context_override is None:
+             context_override = {}
+
+         resolver = macro_resolver or self._macro_resolver
+         if resolver is None:
+             raise DbtInternalError("Macro resolver was None when calling execute_macro!")
+
+         if self._macro_context_generator is None:
+             raise DbtInternalError("Macro context generator was None when calling execute_macro!")
+
+         macro = resolver.find_macro_by_name(macro_name, self.config.project_name, project)
+         if macro is None:
+             if project is None:
+                 package_name = "any package"
+             else:
+                 package_name = 'the "{}" package'.format(project)
+
+             raise DbtRuntimeError(
+                 'dbt could not find a macro with the name "{}" in {}'.format(
+                     macro_name, package_name
+                 )
+             )
+
+         macro_context = self._macro_context_generator(macro, self.config, resolver, project)
+         macro_context.update(context_override)
+
+         macro_function = CallableMacroGenerator(macro, macro_context)
+
+         with self.connections.exception_handler(f"macro {macro_name}"):
+             result = macro_function(**kwargs)
+         return result
+
+     @classmethod
+     def _catalog_filter_table(
+         cls, table: agate.Table, used_schemas: FrozenSet[Tuple[str, str]]
+     ) -> agate.Table:
+         """Filter the table as appropriate for catalog entries. Subclasses can
+         override this to change filtering rules on a per-adapter basis.
+         """
+         # force database + schema to be strings
+         table = table_from_rows(
+             table.rows,
+             table.column_names,
+             text_only_columns=["table_database", "table_schema", "table_name"],
+         )
+         return table.where(_catalog_filter_schemas(used_schemas))
+
+     def _get_one_catalog(
+         self,
+         information_schema: InformationSchema,
+         schemas: Set[str],
+         used_schemas: FrozenSet[Tuple[str, str]],
+     ) -> agate.Table:
+         kwargs = {"information_schema": information_schema, "schemas": schemas}
+         table = self.execute_macro(GET_CATALOG_MACRO_NAME, kwargs=kwargs)
+
+         results = self._catalog_filter_table(table, used_schemas)  # type: ignore[arg-type]
+         return results
+
+     def _get_one_catalog_by_relations(
+         self,
+         information_schema: InformationSchema,
+         relations: List[BaseRelation],
+         used_schemas: FrozenSet[Tuple[str, str]],
+     ) -> agate.Table:
+         kwargs = {
+             "information_schema": information_schema,
+             "relations": relations,
+         }
+         table = self.execute_macro(GET_CATALOG_RELATIONS_MACRO_NAME, kwargs=kwargs)
+
+         results = self._catalog_filter_table(table, used_schemas)  # type: ignore[arg-type]
+         return results
+
+     def get_filtered_catalog(
+         self,
+         relation_configs: Iterable[RelationConfig],
+         used_schemas: FrozenSet[Tuple[str, str]],
+         relations: Optional[Set[BaseRelation]] = None,
+     ):
+         catalogs: agate.Table
+         if (
+             relations is None
+             or len(relations) > 100
+             or not self.supports(Capability.SchemaMetadataByRelations)
+         ):
+             # Do it the traditional way. We get the full catalog.
+             catalogs, exceptions = self.get_catalog(relation_configs, used_schemas)
+         else:
+             # Do it the new way. We try to save time by selecting information
+             # only for the exact set of relations we are interested in.
+             catalogs, exceptions = self.get_catalog_by_relations(used_schemas, relations)
+
+         if relations and catalogs:
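+             # (database, schema, identifier) keys, casefolded so rows from the
+             # catalog below can be matched case-insensitively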
+ relation_map = {
1166
+ (
1167
+ r.database.casefold() if r.database else None,
1168
+ r.schema.casefold() if r.schema else None,
1169
+ r.identifier.casefold() if r.identifier else None,
1170
+ )
1171
+ for r in relations
1172
+ }
1173
+
1174
+ def in_map(row: agate.Row):
1175
+ d = _expect_row_value("table_database", row)
1176
+ s = _expect_row_value("table_schema", row)
1177
+ i = _expect_row_value("table_name", row)
1178
+ d = d.casefold() if d is not None else None
1179
+ s = s.casefold() if s is not None else None
1180
+ i = i.casefold() if i is not None else None
1181
+ return (d, s, i) in relation_map
1182
+
1183
+ catalogs = catalogs.where(in_map)
1184
+
1185
+ return catalogs, exceptions
1186
+
1187
+ def row_matches_relation(self, row: agate.Row, relations: Set[BaseRelation]):
1188
+ pass
1189
+
1190
+ def get_catalog(
1191
+ self,
1192
+ relation_configs: Iterable[RelationConfig],
1193
+ used_schemas: FrozenSet[Tuple[str, str]],
1194
+ ) -> Tuple[agate.Table, List[Exception]]:
1195
+ with executor(self.config) as tpe:
1196
+ futures: List[Future[agate.Table]] = []
1197
+ schema_map: SchemaSearchMap = self._get_catalog_schemas(relation_configs)
1198
+ for info, schemas in schema_map.items():
1199
+ if len(schemas) == 0:
1200
+ continue
1201
+ name = ".".join([str(info.database), "information_schema"])
1202
+ fut = tpe.submit_connected(
1203
+ self, name, self._get_one_catalog, info, schemas, used_schemas
1204
+ )
1205
+ futures.append(fut)
1206
+
1207
+ catalogs, exceptions = catch_as_completed(futures)
1208
+ return catalogs, exceptions
1209
+
1210
+ def get_catalog_by_relations(
1211
+ self, used_schemas: FrozenSet[Tuple[str, str]], relations: Set[BaseRelation]
1212
+ ) -> Tuple[agate.Table, List[Exception]]:
1213
+ with executor(self.config) as tpe:
1214
+ futures: List[Future[agate.Table]] = []
1215
+ relations_by_schema = self._get_catalog_relations_by_info_schema(relations)
1216
+ for info_schema in relations_by_schema:
1217
+ name = ".".join([str(info_schema.database), "information_schema"])
1218
+ relations = set(relations_by_schema[info_schema])
1219
+ fut = tpe.submit_connected(
1220
+ self,
1221
+ name,
1222
+ self._get_one_catalog_by_relations,
1223
+ info_schema,
1224
+ relations,
1225
+ used_schemas,
1226
+ )
1227
+ futures.append(fut)
1228
+
1229
+ catalogs, exceptions = catch_as_completed(futures)
1230
+ return catalogs, exceptions
1231
+
1232
+ def cancel_open_connections(self):
1233
+ """Cancel all open connections."""
1234
+ return self.connections.cancel_open()
1235
+
+    def calculate_freshness(
+        self,
+        source: BaseRelation,
+        loaded_at_field: str,
+        filter: Optional[str],
+        macro_resolver: Optional[MacroResolverProtocol] = None,
+    ) -> Tuple[Optional[AdapterResponse], FreshnessResponse]:
+        """Calculate the freshness of sources in dbt, and return it"""
+        kwargs: Dict[str, Any] = {
+            "source": source,
+            "loaded_at_field": loaded_at_field,
+            "filter": filter,
+        }
+
+        # run the macro
+        # in older versions of dbt-core, the 'collect_freshness' macro returned the table of results directly
+        # starting in v1.5, by default, we return both the table and the adapter response (metadata about the query)
+        result: Union[
+            AttrDict,  # current: contains AdapterResponse + agate.Table
+            agate.Table,  # previous: just the table
+        ]
+        result = self.execute_macro(
+            FRESHNESS_MACRO_NAME, kwargs=kwargs, macro_resolver=macro_resolver
+        )
+        if isinstance(result, agate.Table):
+            warn_or_error(CollectFreshnessReturnSignature())
+            adapter_response = None
+            table = result
+        else:
+            adapter_response, table = result.response, result.table  # type: ignore[attr-defined]
+        # now we have a 1-row table of the maximum `loaded_at_field` value and
+        # the current time according to the db.
+        if len(table) != 1 or len(table[0]) != 2:
+            raise MacroResultError(FRESHNESS_MACRO_NAME, table)
+        if table[0][0] is None:
+            # no records in the table, so really the max_loaded_at was
+            # infinitely long ago. Just call it 0:00 January 1, year 1, UTC
+            max_loaded_at = datetime(1, 1, 1, 0, 0, 0, tzinfo=pytz.UTC)
+        else:
+            max_loaded_at = _utc(table[0][0], source, loaded_at_field)
+
+        snapshotted_at = _utc(table[0][1], source, loaded_at_field)
+        age = (snapshotted_at - max_loaded_at).total_seconds()
+        freshness: FreshnessResponse = {
+            "max_loaded_at": max_loaded_at,
+            "snapshotted_at": snapshotted_at,
+            "age": age,
+        }
+        return adapter_response, freshness
+
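Stripped of the macro plumbing, the freshness calculation is a subtraction between two UTC timestamps, with a year-1 sentinel standing in for "the source has no rows". A runnable sketch of just that arithmetic:

```python
from datetime import datetime, timezone
from typing import Optional


def freshness_age(max_loaded_at: Optional[datetime], snapshotted_at: datetime) -> float:
    # A NULL max(loaded_at) means the source is empty; treat it as
    # "infinitely long ago" via the year-1 sentinel, as the method above does.
    if max_loaded_at is None:
        max_loaded_at = datetime(1, 1, 1, tzinfo=timezone.utc)
    return (snapshotted_at - max_loaded_at).total_seconds()


now = datetime(2024, 1, 2, 12, 0, tzinfo=timezone.utc)
print(freshness_age(datetime(2024, 1, 2, 9, 0, tzinfo=timezone.utc), now))  # 10800.0 (3 hours)
print(freshness_age(None, now))  # astronomically large: the sentinel is ~2024 years ago
```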
+    def calculate_freshness_from_metadata(
+        self,
+        source: BaseRelation,
+        macro_resolver: Optional[MacroResolverProtocol] = None,
+    ) -> Tuple[Optional[AdapterResponse], FreshnessResponse]:
+        kwargs: Dict[str, Any] = {
+            "information_schema": source.information_schema_only(),
+            "relations": [source],
+        }
+        result = self.execute_macro(
+            GET_RELATION_LAST_MODIFIED_MACRO_NAME,
+            kwargs=kwargs,
+            macro_resolver=macro_resolver,
+        )
+        adapter_response, table = result.response, result.table  # type: ignore[attr-defined]
+
+        try:
+            row = table[0]
+            last_modified_val = get_column_value_uncased("last_modified", row)
+            snapshotted_at_val = get_column_value_uncased("snapshotted_at", row)
+        except Exception:
+            raise MacroResultError(GET_RELATION_LAST_MODIFIED_MACRO_NAME, table)
+
+        if last_modified_val is None:
+            # Interpret a missing value as "infinitely long ago"
+            max_loaded_at = datetime(1, 1, 1, 0, 0, 0, tzinfo=pytz.UTC)
+        else:
+            max_loaded_at = _utc(last_modified_val, None, "last_modified")
+
+        snapshotted_at = _utc(snapshotted_at_val, None, "snapshotted_at")
+
+        age = (snapshotted_at - max_loaded_at).total_seconds()
+
+        freshness: FreshnessResponse = {
+            "max_loaded_at": max_loaded_at,
+            "snapshotted_at": snapshotted_at,
+            "age": age,
+        }
+
+        return adapter_response, freshness
+
+    def pre_model_hook(self, config: Mapping[str, Any]) -> Any:
+        """A hook for running some operation before the model materialization
+        runs. The hook can assume it has a connection available.
+
+        The only parameter is a configuration dictionary (the same one
+        available in the materialization context). It should be considered
+        read-only.
+
+        The pre-model hook may return anything as a context, which will be
+        passed to the post-model hook.
+        """
+        pass
+
+    def post_model_hook(self, config: Mapping[str, Any], context: Any) -> None:
+        """A hook for running some operation after the model materialization
+        runs. The hook can assume it has a connection available.
+
+        The first parameter is a configuration dictionary (the same one
+        available in the materialization context). It should be considered
+        read-only.
+
+        The second parameter is the value returned by pre_model_hook.
+        """
+        pass
+
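The contract between the two hooks is simply that whatever `pre_model_hook` returns is handed back as `post_model_hook`'s `context`. A hypothetical mixin (the names are illustrative, not part of any real adapter) that times each materialization:

```python
import time
from typing import Any, Mapping


class TimingHooksMixin:  # hypothetical example, not from dbt-adapters
    def pre_model_hook(self, config: Mapping[str, Any]) -> Any:
        # read a model-level setting and start a timer
        return {"alias": config.get("alias", "<unknown>"), "started_at": time.monotonic()}

    def post_model_hook(self, config: Mapping[str, Any], context: Any) -> None:
        elapsed = time.monotonic() - context["started_at"]
        print(f"model {context['alias']} took {elapsed:.3f}s")


hooks = TimingHooksMixin()
ctx = hooks.pre_model_hook({"alias": "orders"})
hooks.post_model_hook({}, ctx)  # model orders took 0.000s
```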
+    # Methods used in adapter tests
+    def update_column_sql(
+        self,
+        dst_name: str,
+        dst_column: str,
+        clause: str,
+        where_clause: Optional[str] = None,
+    ) -> str:
+        clause = f"update {dst_name} set {dst_column} = {clause}"
+        if where_clause is not None:
+            clause += f" where {where_clause}"
+        return clause
+
+    def timestamp_add_sql(self, add_to: str, number: int = 1, interval: str = "hour") -> str:
+        # for backwards compatibility, we're compelled to set some sort of
+        # default. A lot of searching has led me to believe that the
+        # '+ interval' syntax used in postgres/redshift is relatively common
+        # and might even be the SQL standard's intention.
+        return f"{add_to} + interval '{number} {interval}'"
+
+    def string_add_sql(
+        self,
+        add_to: str,
+        value: str,
+        location="append",
+    ) -> str:
+        if location == "append":
+            return f"{add_to} || '{value}'"
+        elif location == "prepend":
+            return f"'{value}' || {add_to}"
+        else:
+            raise DbtRuntimeError(f'Got an unexpected location value of "{location}"')
+
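For a quick sanity check, here are the fragments those two helpers render, re-implemented standalone so the snippet runs without an adapter instance:

```python
def timestamp_add_sql(add_to: str, number: int = 1, interval: str = "hour") -> str:
    # mirrors the default '+ interval' rendering above
    return f"{add_to} + interval '{number} {interval}'"


def string_add_sql(add_to: str, value: str, location: str = "append") -> str:
    if location == "append":
        return f"{add_to} || '{value}'"
    elif location == "prepend":
        return f"'{value}' || {add_to}"
    raise ValueError(f'Got an unexpected location value of "{location}"')


print(timestamp_add_sql("loaded_at", 4))          # loaded_at + interval '4 hour'
print(string_add_sql("name", "_tmp"))             # name || '_tmp'
print(string_add_sql("name", "tmp_", "prepend"))  # 'tmp_' || name
```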
+    def get_rows_different_sql(
+        self,
+        relation_a: BaseRelation,
+        relation_b: BaseRelation,
+        column_names: Optional[List[str]] = None,
+        except_operator: str = "EXCEPT",
+    ) -> str:
+        """Generate SQL for a query that returns a single row with two
+        columns: the difference in row counts between the two relations, and
+        the number of mismatched rows.
+        """
+        # This method only really exists for test reasons.
+        names: List[str]
+        if column_names is None:
+            columns = self.get_columns_in_relation(relation_a)
+            names = sorted((self.quote(c.name) for c in columns))
+        else:
+            names = sorted((self.quote(n) for n in column_names))
+        columns_csv = ", ".join(names)
+
+        sql = COLUMNS_EQUAL_SQL.format(
+            columns=columns_csv,
+            relation_a=str(relation_a),
+            relation_b=str(relation_b),
+            except_op=except_operator,
+        )
+
+        return sql
+
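The substitution itself is plain `str.format`: quoted column names are sorted, joined into a CSV, and spliced into `COLUMNS_EQUAL_SQL` (defined further down in this file). A sketch with an abbreviated template:

```python
# abbreviated stand-in for COLUMNS_EQUAL_SQL, just to show the substitution
template = "select {columns} from {relation_a} {except_op} select {columns} from {relation_b}"

names = sorted(f'"{n}"' for n in ["updated_at", "id"])
sql = template.format(
    columns=", ".join(names),
    relation_a='"db"."sch"."a"',
    relation_b='"db"."sch"."b"',
    except_op="EXCEPT",
)
print(sql)
# select "id", "updated_at" from "db"."sch"."a" EXCEPT select "id", "updated_at" from "db"."sch"."b"
```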
+    @property
+    def python_submission_helpers(self) -> Dict[str, Type[PythonJobHelper]]:
+        raise NotImplementedError("python_submission_helpers is not specified")
+
+    @property
+    def default_python_submission_method(self) -> str:
+        raise NotImplementedError("default_python_submission_method is not specified")
+
+    @log_code_execution
+    def submit_python_job(self, parsed_model: dict, compiled_code: str) -> AdapterResponse:
+        submission_method = parsed_model["config"].get(
+            "submission_method", self.default_python_submission_method
+        )
+        if submission_method not in self.python_submission_helpers:
+            raise NotImplementedError(
+                "Submission method {} is not supported for the current adapter".format(
+                    submission_method
+                )
+            )
+        job_helper = self.python_submission_helpers[submission_method](
+            parsed_model, self.connections.profile.credentials
+        )
+        submission_result = job_helper.submit(compiled_code)
+        # process submission result to generate adapter response
+        return self.generate_python_submission_response(submission_result)
+
+    def generate_python_submission_response(self, submission_result: Any) -> AdapterResponse:
+        raise NotImplementedError(
+            "Your adapter needs to implement generate_python_submission_response"
+        )
+
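`submit_python_job` only requires that each helper class accept `(parsed_model, credentials)` and expose `submit(compiled_code)`. A hypothetical sketch of that plumbing; `LocalExecHelper` and the `"local"` method name are invented for illustration:

```python
from typing import Any, Dict, Type


class LocalExecHelper:  # hypothetical; a real helper would talk to Spark, a notebook service, etc.
    def __init__(self, parsed_model: dict, credentials: Any) -> None:
        self.parsed_model = parsed_model
        self.credentials = credentials

    def submit(self, compiled_code: str) -> Any:
        # run the compiled model code in-process, purely for demonstration
        exec(compile(compiled_code, "<python model>", "exec"), {})
        return {"status": "OK"}


# an adapter would then advertise:
#   python_submission_helpers -> {"local": LocalExecHelper}
#   default_python_submission_method -> "local"
helpers: Dict[str, Type[LocalExecHelper]] = {"local": LocalExecHelper}
result = helpers["local"]({"config": {}}, credentials=None).submit("print('hi from a python model')")
print(result)  # {'status': 'OK'}
```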
+    def valid_incremental_strategies(self):
+        """The standard built-in strategies which this adapter supports out-of-the-box.
+        Not used to validate custom strategies defined by end users.
+        """
+        return ["append"]
+
+    def builtin_incremental_strategies(self):
+        return ["append", "delete+insert", "merge", "insert_overwrite"]
+
+    @available.parse_none
+    def get_incremental_strategy_macro(self, model_context, strategy: str):
+        # Construct macro_name from the strategy name
+        if strategy is None:
+            strategy = "default"
+
+        # validate strategies for this adapter
+        valid_strategies = self.valid_incremental_strategies()
+        valid_strategies.append("default")
+        builtin_strategies = self.builtin_incremental_strategies()
+        if strategy in builtin_strategies and strategy not in valid_strategies:
+            raise DbtRuntimeError(
+                f"The incremental strategy '{strategy}' is not valid for this adapter"
+            )
+
+        strategy = strategy.replace("+", "_")
+        macro_name = f"get_incremental_{strategy}_sql"
+        # The model_context should have callable objects for all macros
+        if macro_name not in model_context:
+            raise DbtRuntimeError(
+                'dbt could not find an incremental strategy macro with the name "{}" in {}'.format(
+                    macro_name, self.config.project_name
+                )
+            )
+
+        # This returns a callable macro
+        return model_context[macro_name]
+
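The lookup reduces to a naming convention: `+` in a strategy name becomes `_`, and dbt expects a macro called `get_incremental_<strategy>_sql` in the model context. A standalone sketch of that resolution:

```python
from typing import Callable, Dict


def resolve_strategy_macro(model_context: Dict[str, Callable], strategy: str) -> Callable:
    # mirror the name mangling above: None -> "default", "+" -> "_"
    strategy = (strategy or "default").replace("+", "_")
    macro_name = f"get_incremental_{strategy}_sql"
    if macro_name not in model_context:
        raise KeyError(f"no incremental strategy macro named {macro_name!r}")
    return model_context[macro_name]


context = {"get_incremental_delete_insert_sql": lambda: "-- delete+insert sql"}
macro = resolve_strategy_macro(context, "delete+insert")
print(macro())  # -- delete+insert sql
```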
+    @classmethod
+    def _parse_column_constraint(cls, raw_constraint: Dict[str, Any]) -> ColumnLevelConstraint:
+        try:
+            ColumnLevelConstraint.validate(raw_constraint)
+            return ColumnLevelConstraint.from_dict(raw_constraint)
+        except Exception:
+            raise DbtValidationError(f"Could not parse constraint: {raw_constraint}")
+
+    @classmethod
+    def render_column_constraint(cls, constraint: ColumnLevelConstraint) -> Optional[str]:
+        """Render the given constraint as DDL text. Should be overridden by adapters which need custom constraint
+        rendering."""
+        constraint_expression = constraint.expression or ""
+
+        rendered_column_constraint = None
+        if constraint.type == ConstraintType.check and constraint_expression:
+            rendered_column_constraint = f"check ({constraint_expression})"
+        elif constraint.type == ConstraintType.not_null:
+            rendered_column_constraint = f"not null {constraint_expression}"
+        elif constraint.type == ConstraintType.unique:
+            rendered_column_constraint = f"unique {constraint_expression}"
+        elif constraint.type == ConstraintType.primary_key:
+            rendered_column_constraint = f"primary key {constraint_expression}"
+        elif constraint.type == ConstraintType.foreign_key and constraint_expression:
+            rendered_column_constraint = f"references {constraint_expression}"
+        elif constraint.type == ConstraintType.custom and constraint_expression:
+            rendered_column_constraint = constraint_expression
+
+        if rendered_column_constraint:
+            rendered_column_constraint = rendered_column_constraint.strip()
+
+        return rendered_column_constraint
+
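As a reference for what those branches produce, here is a plain dict-in/string-out mirror of the rendering rules (not the real `ColumnLevelConstraint` dataclass), runnable on its own:

```python
from typing import Any, Dict, Optional


def render_constraint(constraint: Dict[str, Any]) -> Optional[str]:
    ctype, expr = constraint["type"], constraint.get("expression", "")
    if ctype == "check" and expr:
        return f"check ({expr})"
    if ctype == "not_null":
        return f"not null {expr}".strip()
    if ctype == "unique":
        return f"unique {expr}".strip()
    if ctype == "primary_key":
        return f"primary key {expr}".strip()
    if ctype == "foreign_key" and expr:
        return f"references {expr}"
    return None


print(render_constraint({"type": "not_null"}))                       # not null
print(render_constraint({"type": "check", "expression": "id > 0"}))  # check (id > 0)
print(render_constraint({"type": "foreign_key", "expression": "dim_customers (id)"}))
# references dim_customers (id)
```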
+    @available
+    @classmethod
+    def render_raw_columns_constraints(cls, raw_columns: Dict[str, Dict[str, Any]]) -> List[str]:
+        rendered_column_constraints = []
+
+        for v in raw_columns.values():
+            col_name = cls.quote(v["name"]) if v.get("quote") else v["name"]
+            rendered_column_constraint = [f"{col_name} {v['data_type']}"]
+            for con in v.get("constraints", []):
+                constraint = cls._parse_column_constraint(con)
+                c = cls.process_parsed_constraint(constraint, cls.render_column_constraint)
+                if c is not None:
+                    rendered_column_constraint.append(c)
+            rendered_column_constraints.append(" ".join(rendered_column_constraint))
+
+        return rendered_column_constraints
+
+    @classmethod
+    def process_parsed_constraint(
+        cls,
+        parsed_constraint: Union[ColumnLevelConstraint, ModelLevelConstraint],
+        render_func,
+    ) -> Optional[str]:
+        if (
+            parsed_constraint.warn_unsupported
+            and cls.CONSTRAINT_SUPPORT[parsed_constraint.type] == ConstraintSupport.NOT_SUPPORTED
+        ):
+            warn_or_error(
+                ConstraintNotSupported(constraint=parsed_constraint.type.value, adapter=cls.type())
+            )
+        if (
+            parsed_constraint.warn_unenforced
+            and cls.CONSTRAINT_SUPPORT[parsed_constraint.type] == ConstraintSupport.NOT_ENFORCED
+        ):
+            warn_or_error(
+                ConstraintNotEnforced(constraint=parsed_constraint.type.value, adapter=cls.type())
+            )
+        if cls.CONSTRAINT_SUPPORT[parsed_constraint.type] != ConstraintSupport.NOT_SUPPORTED:
+            return render_func(parsed_constraint)
+
+        return None
+
+    @classmethod
+    def _parse_model_constraint(cls, raw_constraint: Dict[str, Any]) -> ModelLevelConstraint:
+        try:
+            ModelLevelConstraint.validate(raw_constraint)
+            c = ModelLevelConstraint.from_dict(raw_constraint)
+            return c
+        except Exception:
+            raise DbtValidationError(f"Could not parse constraint: {raw_constraint}")
+
+    @available
+    @classmethod
+    def render_raw_model_constraints(cls, raw_constraints: List[Dict[str, Any]]) -> List[str]:
+        return [c for c in map(cls.render_raw_model_constraint, raw_constraints) if c is not None]
+
+    @classmethod
+    def render_raw_model_constraint(cls, raw_constraint: Dict[str, Any]) -> Optional[str]:
+        constraint = cls._parse_model_constraint(raw_constraint)
+        return cls.process_parsed_constraint(constraint, cls.render_model_constraint)
+
+    @classmethod
+    def render_model_constraint(cls, constraint: ModelLevelConstraint) -> Optional[str]:
+        """Render the given constraint as DDL text. Should be overridden by adapters which need custom constraint
+        rendering."""
+        constraint_prefix = f"constraint {constraint.name} " if constraint.name else ""
+        column_list = ", ".join(constraint.columns)
+        if constraint.type == ConstraintType.check and constraint.expression:
+            return f"{constraint_prefix}check ({constraint.expression})"
+        elif constraint.type == ConstraintType.unique:
+            constraint_expression = f" {constraint.expression}" if constraint.expression else ""
+            return f"{constraint_prefix}unique{constraint_expression} ({column_list})"
+        elif constraint.type == ConstraintType.primary_key:
+            constraint_expression = f" {constraint.expression}" if constraint.expression else ""
+            return f"{constraint_prefix}primary key{constraint_expression} ({column_list})"
+        elif constraint.type == ConstraintType.foreign_key and constraint.expression:
+            return f"{constraint_prefix}foreign key ({column_list}) references {constraint.expression}"
+        elif constraint.type == ConstraintType.custom and constraint.expression:
+            return f"{constraint_prefix}{constraint.expression}"
+        else:
+            return None
+
+    @classmethod
+    def capabilities(cls) -> CapabilityDict:
+        return cls._capabilities
+
+    @classmethod
+    def supports(cls, capability: Capability) -> bool:
+        return bool(cls.capabilities()[capability])
+
+
+COLUMNS_EQUAL_SQL = """
+with diff_count as (
+    SELECT
+        1 as id,
+        COUNT(*) as num_missing FROM (
+            (SELECT {columns} FROM {relation_a} {except_op}
+             SELECT {columns} FROM {relation_b})
+             UNION ALL
+            (SELECT {columns} FROM {relation_b} {except_op}
+             SELECT {columns} FROM {relation_a})
+        ) as a
+), table_a as (
+    SELECT COUNT(*) as num_rows FROM {relation_a}
+), table_b as (
+    SELECT COUNT(*) as num_rows FROM {relation_b}
+), row_count_diff as (
+    select
+        1 as id,
+        table_a.num_rows - table_b.num_rows as difference
+    from table_a, table_b
+)
+select
+    row_count_diff.difference as row_count_difference,
+    diff_count.num_missing as num_mismatched
+from row_count_diff
+join diff_count using (id)
+""".strip()
+
+
+def catch_as_completed(
+    futures,  # typing: List[Future[agate.Table]]
+) -> Tuple[agate.Table, List[Exception]]:
+    tables: List[agate.Table] = []
+    exceptions: List[Exception] = []
+
+    for future in as_completed(futures):
+        exc = future.exception()
+        # we want to re-raise on ctrl+c and BaseException
+        if exc is None:
+            catalog = future.result()
+            tables.append(catalog)
+        elif isinstance(exc, KeyboardInterrupt) or not isinstance(exc, Exception):
+            raise exc
+        else:
+            warn_or_error(CatalogGenerationError(exc=str(exc)))
+            # exc is not None, derives from Exception, and isn't ctrl+c
+            exceptions.append(exc)
+    return merge_tables(tables), exceptions
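The pattern in `catch_as_completed` — collect successful results, re-raise ctrl+c, record everything else and keep going — also works with the standard library alone. A self-contained demonstration with plain lists standing in for agate tables:

```python
from concurrent.futures import ThreadPoolExecutor, as_completed
from typing import List, Tuple


def fetch(n: int) -> List[int]:
    if n == 2:
        raise RuntimeError("schema 2 is unreachable")
    return [n, n * 10]


def gather(futures) -> Tuple[List[int], List[Exception]]:
    rows: List[int] = []
    exceptions: List[Exception] = []
    for future in as_completed(futures):
        exc = future.exception()
        if exc is None:
            rows.extend(future.result())
        elif isinstance(exc, KeyboardInterrupt) or not isinstance(exc, Exception):
            raise exc  # re-raise ctrl+c and other BaseExceptions
        else:
            exceptions.append(exc)  # record and keep going
    return rows, exceptions


with ThreadPoolExecutor(max_workers=4) as tpe:
    rows, errors = gather([tpe.submit(fetch, n) for n in range(4)])
print(sorted(rows))  # [0, 0, 1, 3, 10, 30]
print(errors)        # [RuntimeError('schema 2 is unreachable')]
```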