dvt-core 0.52.2__cp310-cp310-macosx_10_9_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dvt-core might be problematic. Click here for more details.

Files changed (275) hide show
  1. dbt/__init__.py +7 -0
  2. dbt/_pydantic_shim.py +26 -0
  3. dbt/artifacts/__init__.py +0 -0
  4. dbt/artifacts/exceptions/__init__.py +1 -0
  5. dbt/artifacts/exceptions/schemas.py +31 -0
  6. dbt/artifacts/resources/__init__.py +116 -0
  7. dbt/artifacts/resources/base.py +67 -0
  8. dbt/artifacts/resources/types.py +93 -0
  9. dbt/artifacts/resources/v1/analysis.py +10 -0
  10. dbt/artifacts/resources/v1/catalog.py +23 -0
  11. dbt/artifacts/resources/v1/components.py +274 -0
  12. dbt/artifacts/resources/v1/config.py +277 -0
  13. dbt/artifacts/resources/v1/documentation.py +11 -0
  14. dbt/artifacts/resources/v1/exposure.py +51 -0
  15. dbt/artifacts/resources/v1/function.py +52 -0
  16. dbt/artifacts/resources/v1/generic_test.py +31 -0
  17. dbt/artifacts/resources/v1/group.py +21 -0
  18. dbt/artifacts/resources/v1/hook.py +11 -0
  19. dbt/artifacts/resources/v1/macro.py +29 -0
  20. dbt/artifacts/resources/v1/metric.py +172 -0
  21. dbt/artifacts/resources/v1/model.py +145 -0
  22. dbt/artifacts/resources/v1/owner.py +10 -0
  23. dbt/artifacts/resources/v1/saved_query.py +111 -0
  24. dbt/artifacts/resources/v1/seed.py +41 -0
  25. dbt/artifacts/resources/v1/semantic_layer_components.py +72 -0
  26. dbt/artifacts/resources/v1/semantic_model.py +314 -0
  27. dbt/artifacts/resources/v1/singular_test.py +14 -0
  28. dbt/artifacts/resources/v1/snapshot.py +91 -0
  29. dbt/artifacts/resources/v1/source_definition.py +84 -0
  30. dbt/artifacts/resources/v1/sql_operation.py +10 -0
  31. dbt/artifacts/resources/v1/unit_test_definition.py +77 -0
  32. dbt/artifacts/schemas/__init__.py +0 -0
  33. dbt/artifacts/schemas/base.py +191 -0
  34. dbt/artifacts/schemas/batch_results.py +24 -0
  35. dbt/artifacts/schemas/catalog/__init__.py +11 -0
  36. dbt/artifacts/schemas/catalog/v1/__init__.py +0 -0
  37. dbt/artifacts/schemas/catalog/v1/catalog.py +59 -0
  38. dbt/artifacts/schemas/freshness/__init__.py +1 -0
  39. dbt/artifacts/schemas/freshness/v3/__init__.py +0 -0
  40. dbt/artifacts/schemas/freshness/v3/freshness.py +158 -0
  41. dbt/artifacts/schemas/manifest/__init__.py +2 -0
  42. dbt/artifacts/schemas/manifest/v12/__init__.py +0 -0
  43. dbt/artifacts/schemas/manifest/v12/manifest.py +211 -0
  44. dbt/artifacts/schemas/results.py +147 -0
  45. dbt/artifacts/schemas/run/__init__.py +2 -0
  46. dbt/artifacts/schemas/run/v5/__init__.py +0 -0
  47. dbt/artifacts/schemas/run/v5/run.py +184 -0
  48. dbt/artifacts/schemas/upgrades/__init__.py +4 -0
  49. dbt/artifacts/schemas/upgrades/upgrade_manifest.py +174 -0
  50. dbt/artifacts/schemas/upgrades/upgrade_manifest_dbt_version.py +2 -0
  51. dbt/artifacts/utils/validation.py +153 -0
  52. dbt/cli/__init__.py +1 -0
  53. dbt/cli/context.py +17 -0
  54. dbt/cli/exceptions.py +57 -0
  55. dbt/cli/flags.py +560 -0
  56. dbt/cli/main.py +2039 -0
  57. dbt/cli/option_types.py +121 -0
  58. dbt/cli/options.py +80 -0
  59. dbt/cli/params.py +804 -0
  60. dbt/cli/requires.py +490 -0
  61. dbt/cli/resolvers.py +50 -0
  62. dbt/cli/types.py +40 -0
  63. dbt/clients/__init__.py +0 -0
  64. dbt/clients/checked_load.py +83 -0
  65. dbt/clients/git.py +164 -0
  66. dbt/clients/jinja.py +206 -0
  67. dbt/clients/jinja_static.py +245 -0
  68. dbt/clients/registry.py +192 -0
  69. dbt/clients/yaml_helper.py +68 -0
  70. dbt/compilation.py +876 -0
  71. dbt/compute/__init__.py +14 -0
  72. dbt/compute/engines/__init__.py +12 -0
  73. dbt/compute/engines/spark_engine.py +624 -0
  74. dbt/compute/federated_executor.py +837 -0
  75. dbt/compute/filter_pushdown.cpython-310-darwin.so +0 -0
  76. dbt/compute/filter_pushdown.py +273 -0
  77. dbt/compute/jar_provisioning.cpython-310-darwin.so +0 -0
  78. dbt/compute/jar_provisioning.py +255 -0
  79. dbt/compute/java_compat.cpython-310-darwin.so +0 -0
  80. dbt/compute/java_compat.py +689 -0
  81. dbt/compute/jdbc_utils.cpython-310-darwin.so +0 -0
  82. dbt/compute/jdbc_utils.py +678 -0
  83. dbt/compute/smart_selector.cpython-310-darwin.so +0 -0
  84. dbt/compute/smart_selector.py +311 -0
  85. dbt/compute/strategies/__init__.py +54 -0
  86. dbt/compute/strategies/base.py +165 -0
  87. dbt/compute/strategies/dataproc.py +207 -0
  88. dbt/compute/strategies/emr.py +203 -0
  89. dbt/compute/strategies/local.py +364 -0
  90. dbt/compute/strategies/standalone.py +262 -0
  91. dbt/config/__init__.py +4 -0
  92. dbt/config/catalogs.py +94 -0
  93. dbt/config/compute.cpython-310-darwin.so +0 -0
  94. dbt/config/compute.py +547 -0
  95. dbt/config/dvt_profile.cpython-310-darwin.so +0 -0
  96. dbt/config/dvt_profile.py +342 -0
  97. dbt/config/profile.py +422 -0
  98. dbt/config/project.py +873 -0
  99. dbt/config/project_utils.py +28 -0
  100. dbt/config/renderer.py +231 -0
  101. dbt/config/runtime.py +553 -0
  102. dbt/config/selectors.py +208 -0
  103. dbt/config/utils.py +77 -0
  104. dbt/constants.py +28 -0
  105. dbt/context/__init__.py +0 -0
  106. dbt/context/base.py +745 -0
  107. dbt/context/configured.py +135 -0
  108. dbt/context/context_config.py +382 -0
  109. dbt/context/docs.py +82 -0
  110. dbt/context/exceptions_jinja.py +178 -0
  111. dbt/context/macro_resolver.py +195 -0
  112. dbt/context/macros.py +171 -0
  113. dbt/context/manifest.py +72 -0
  114. dbt/context/providers.py +2249 -0
  115. dbt/context/query_header.py +13 -0
  116. dbt/context/secret.py +58 -0
  117. dbt/context/target.py +74 -0
  118. dbt/contracts/__init__.py +0 -0
  119. dbt/contracts/files.py +413 -0
  120. dbt/contracts/graph/__init__.py +0 -0
  121. dbt/contracts/graph/manifest.py +1904 -0
  122. dbt/contracts/graph/metrics.py +97 -0
  123. dbt/contracts/graph/model_config.py +70 -0
  124. dbt/contracts/graph/node_args.py +42 -0
  125. dbt/contracts/graph/nodes.py +1806 -0
  126. dbt/contracts/graph/semantic_manifest.py +232 -0
  127. dbt/contracts/graph/unparsed.py +811 -0
  128. dbt/contracts/project.py +417 -0
  129. dbt/contracts/results.py +53 -0
  130. dbt/contracts/selection.py +23 -0
  131. dbt/contracts/sql.py +85 -0
  132. dbt/contracts/state.py +68 -0
  133. dbt/contracts/util.py +46 -0
  134. dbt/deprecations.py +346 -0
  135. dbt/deps/__init__.py +0 -0
  136. dbt/deps/base.py +152 -0
  137. dbt/deps/git.py +195 -0
  138. dbt/deps/local.py +79 -0
  139. dbt/deps/registry.py +130 -0
  140. dbt/deps/resolver.py +149 -0
  141. dbt/deps/tarball.py +120 -0
  142. dbt/docs/source/_ext/dbt_click.py +119 -0
  143. dbt/docs/source/conf.py +32 -0
  144. dbt/env_vars.py +64 -0
  145. dbt/event_time/event_time.py +40 -0
  146. dbt/event_time/sample_window.py +60 -0
  147. dbt/events/__init__.py +15 -0
  148. dbt/events/base_types.py +36 -0
  149. dbt/events/core_types_pb2.py +2 -0
  150. dbt/events/logging.py +108 -0
  151. dbt/events/types.py +2516 -0
  152. dbt/exceptions.py +1486 -0
  153. dbt/flags.py +89 -0
  154. dbt/graph/__init__.py +11 -0
  155. dbt/graph/cli.py +247 -0
  156. dbt/graph/graph.py +172 -0
  157. dbt/graph/queue.py +214 -0
  158. dbt/graph/selector.py +374 -0
  159. dbt/graph/selector_methods.py +975 -0
  160. dbt/graph/selector_spec.py +222 -0
  161. dbt/graph/thread_pool.py +18 -0
  162. dbt/hooks.py +21 -0
  163. dbt/include/README.md +49 -0
  164. dbt/include/__init__.py +3 -0
  165. dbt/include/starter_project/.gitignore +4 -0
  166. dbt/include/starter_project/README.md +15 -0
  167. dbt/include/starter_project/__init__.py +3 -0
  168. dbt/include/starter_project/analyses/.gitkeep +0 -0
  169. dbt/include/starter_project/dbt_project.yml +36 -0
  170. dbt/include/starter_project/macros/.gitkeep +0 -0
  171. dbt/include/starter_project/models/example/my_first_dbt_model.sql +27 -0
  172. dbt/include/starter_project/models/example/my_second_dbt_model.sql +6 -0
  173. dbt/include/starter_project/models/example/schema.yml +21 -0
  174. dbt/include/starter_project/seeds/.gitkeep +0 -0
  175. dbt/include/starter_project/snapshots/.gitkeep +0 -0
  176. dbt/include/starter_project/tests/.gitkeep +0 -0
  177. dbt/internal_deprecations.py +26 -0
  178. dbt/jsonschemas/__init__.py +3 -0
  179. dbt/jsonschemas/jsonschemas.py +309 -0
  180. dbt/jsonschemas/project/0.0.110.json +4717 -0
  181. dbt/jsonschemas/project/0.0.85.json +2015 -0
  182. dbt/jsonschemas/resources/0.0.110.json +2636 -0
  183. dbt/jsonschemas/resources/0.0.85.json +2536 -0
  184. dbt/jsonschemas/resources/latest.json +6773 -0
  185. dbt/links.py +4 -0
  186. dbt/materializations/__init__.py +0 -0
  187. dbt/materializations/incremental/__init__.py +0 -0
  188. dbt/materializations/incremental/microbatch.py +236 -0
  189. dbt/mp_context.py +8 -0
  190. dbt/node_types.py +37 -0
  191. dbt/parser/__init__.py +23 -0
  192. dbt/parser/analysis.py +21 -0
  193. dbt/parser/base.py +548 -0
  194. dbt/parser/common.py +266 -0
  195. dbt/parser/docs.py +52 -0
  196. dbt/parser/fixtures.py +51 -0
  197. dbt/parser/functions.py +30 -0
  198. dbt/parser/generic_test.py +100 -0
  199. dbt/parser/generic_test_builders.py +333 -0
  200. dbt/parser/hooks.py +118 -0
  201. dbt/parser/macros.py +137 -0
  202. dbt/parser/manifest.py +2204 -0
  203. dbt/parser/models.py +573 -0
  204. dbt/parser/partial.py +1178 -0
  205. dbt/parser/read_files.py +445 -0
  206. dbt/parser/schema_generic_tests.py +422 -0
  207. dbt/parser/schema_renderer.py +111 -0
  208. dbt/parser/schema_yaml_readers.py +935 -0
  209. dbt/parser/schemas.py +1466 -0
  210. dbt/parser/search.py +149 -0
  211. dbt/parser/seeds.py +28 -0
  212. dbt/parser/singular_test.py +20 -0
  213. dbt/parser/snapshots.py +44 -0
  214. dbt/parser/sources.py +558 -0
  215. dbt/parser/sql.py +62 -0
  216. dbt/parser/unit_tests.py +621 -0
  217. dbt/plugins/__init__.py +20 -0
  218. dbt/plugins/contracts.py +9 -0
  219. dbt/plugins/exceptions.py +2 -0
  220. dbt/plugins/manager.py +163 -0
  221. dbt/plugins/manifest.py +21 -0
  222. dbt/profiler.py +20 -0
  223. dbt/py.typed +1 -0
  224. dbt/query_analyzer.cpython-310-darwin.so +0 -0
  225. dbt/query_analyzer.py +410 -0
  226. dbt/runners/__init__.py +2 -0
  227. dbt/runners/exposure_runner.py +7 -0
  228. dbt/runners/no_op_runner.py +45 -0
  229. dbt/runners/saved_query_runner.py +7 -0
  230. dbt/selected_resources.py +8 -0
  231. dbt/task/__init__.py +0 -0
  232. dbt/task/base.py +503 -0
  233. dbt/task/build.py +197 -0
  234. dbt/task/clean.py +56 -0
  235. dbt/task/clone.py +161 -0
  236. dbt/task/compile.py +150 -0
  237. dbt/task/compute.py +454 -0
  238. dbt/task/debug.py +505 -0
  239. dbt/task/deps.py +280 -0
  240. dbt/task/docs/__init__.py +3 -0
  241. dbt/task/docs/generate.py +660 -0
  242. dbt/task/docs/index.html +250 -0
  243. dbt/task/docs/serve.py +29 -0
  244. dbt/task/freshness.py +322 -0
  245. dbt/task/function.py +121 -0
  246. dbt/task/group_lookup.py +46 -0
  247. dbt/task/init.py +553 -0
  248. dbt/task/java.py +316 -0
  249. dbt/task/list.py +236 -0
  250. dbt/task/printer.py +175 -0
  251. dbt/task/retry.py +175 -0
  252. dbt/task/run.py +1306 -0
  253. dbt/task/run_operation.py +141 -0
  254. dbt/task/runnable.py +758 -0
  255. dbt/task/seed.py +103 -0
  256. dbt/task/show.py +149 -0
  257. dbt/task/snapshot.py +56 -0
  258. dbt/task/spark.py +414 -0
  259. dbt/task/sql.py +110 -0
  260. dbt/task/target_sync.py +759 -0
  261. dbt/task/test.py +464 -0
  262. dbt/tests/fixtures/__init__.py +1 -0
  263. dbt/tests/fixtures/project.py +620 -0
  264. dbt/tests/util.py +651 -0
  265. dbt/tracking.py +529 -0
  266. dbt/utils/__init__.py +3 -0
  267. dbt/utils/artifact_upload.py +151 -0
  268. dbt/utils/utils.py +408 -0
  269. dbt/version.py +268 -0
  270. dvt_cli/__init__.py +72 -0
  271. dvt_core-0.52.2.dist-info/METADATA +286 -0
  272. dvt_core-0.52.2.dist-info/RECORD +275 -0
  273. dvt_core-0.52.2.dist-info/WHEEL +5 -0
  274. dvt_core-0.52.2.dist-info/entry_points.txt +2 -0
  275. dvt_core-0.52.2.dist-info/top_level.txt +2 -0
@@ -0,0 +1,811 @@
1
+ import datetime
2
+ import re
3
+ from dataclasses import dataclass, field
4
+ from pathlib import Path
5
+ from typing import Any, Dict, List, Literal, Optional, Sequence, Union
6
+
7
+ # trigger the PathEncoder
8
+ import dbt_common.helper_types # noqa:F401
9
+ from dbt import deprecations
10
+ from dbt.artifacts.resources import (
11
+ ConstantPropertyInput,
12
+ Defaults,
13
+ DimensionValidityParams,
14
+ Docs,
15
+ ExposureType,
16
+ ExternalTable,
17
+ FreshnessThreshold,
18
+ FunctionArgument,
19
+ FunctionReturns,
20
+ MacroArgument,
21
+ MaturityType,
22
+ MeasureAggregationParameters,
23
+ NodeVersion,
24
+ Owner,
25
+ Quoting,
26
+ TimeSpine,
27
+ UnitTestInputFixture,
28
+ UnitTestNodeVersions,
29
+ UnitTestOutputFixture,
30
+ UnitTestOverrides,
31
+ list_str,
32
+ metas,
33
+ )
34
+ from dbt.exceptions import ParsingError
35
+ from dbt.node_types import NodeType
36
+ from dbt_common.contracts.config.base import CompareBehavior, MergeBehavior
37
+ from dbt_common.contracts.config.metadata import ShowBehavior
38
+ from dbt_common.contracts.config.properties import AdditionalPropertiesMixin
39
+ from dbt_common.contracts.util import Mergeable
40
+ from dbt_common.dataclass_schema import (
41
+ ExtensibleDbtClassMixin,
42
+ StrEnum,
43
+ ValidationError,
44
+ dbtClassMixin,
45
+ )
46
+ from dbt_common.exceptions import DbtInternalError
47
+ from dbt_semantic_interfaces.type_enums import (
48
+ ConversionCalculationType,
49
+ PeriodAggregation,
50
+ )
51
+
52
+
53
@dataclass
class UnparsedBaseNode(dbtClassMixin):
    """Base for unparsed file-backed nodes: records which file, in which
    package, the node was defined in."""

    package_name: str
    path: str
    original_file_path: str

    @property
    def file_id(self):
        # Unique id of the defining file: "<package>://<path within package>".
        return f"{self.package_name}://{self.original_file_path}"
62
+
63
+
64
@dataclass
class HasCode(dbtClassMixin):
    """Mixin for nodes that carry raw (not yet compiled) source code."""

    raw_code: str
    language: str

    @property
    def empty(self):
        """Whether the raw code is blank (nothing but whitespace)."""
        stripped = self.raw_code.strip()
        return len(stripped) == 0
72
+
73
+
74
@dataclass
class UnparsedMacro(UnparsedBaseNode, HasCode):
    """An unparsed macro: file location plus its raw Jinja code."""

    resource_type: Literal[NodeType.Macro]
77
+
78
+
79
@dataclass
class UnparsedGenericTest(UnparsedBaseNode, HasCode):
    """An unparsed generic test definition.

    NOTE(review): resource_type is Macro rather than Test — presumably because
    generic tests are defined as macros at parse time; confirm against the
    parser before changing.
    """

    resource_type: Literal[NodeType.Macro]
82
+
83
+
84
@dataclass
class UnparsedNode(UnparsedBaseNode, HasCode):
    """An unparsed executable node (model, test, snapshot, ...): named raw
    code plus its resource type."""

    name: str
    resource_type: NodeType

    @property
    def search_name(self):
        # The name used when searching/selecting nodes.
        return self.name
92
+
93
+
94
@dataclass
class UnparsedRunHook(UnparsedNode):
    """An unparsed on-run-start / on-run-end hook."""

    resource_type: Literal[NodeType.Operation]
    # Position of the hook in its configured list — TODO confirm; not set here.
    index: Optional[int] = None
98
+
99
+
100
@dataclass
class HasColumnProps(AdditionalPropertiesMixin, ExtensibleDbtClassMixin):
    """Properties shared by YAML column (and column-like) entries."""

    name: str
    description: str = ""
    meta: Dict[str, Any] = field(default_factory=dict)
    data_type: Optional[str] = None
    constraints: List[Dict[str, Any]] = field(default_factory=list)
    docs: Docs = field(default_factory=Docs)
    config: Dict[str, Any] = field(default_factory=dict)
    # Catch-all for unrecognized YAML keys (via AdditionalPropertiesMixin).
    _extra: Dict[str, Any] = field(default_factory=dict)
110
+
111
+
112
# A test entry in YAML: either a bare test name (str) or a mapping of
# test name to its configuration.
TestDef = Union[Dict[str, Any], str]
113
+
114
+
115
@dataclass
class HasColumnAndTestProps(HasColumnProps):
    """Column properties plus attached test definitions."""

    data_tests: List[TestDef] = field(default_factory=list)
    tests: List[TestDef] = field(
        default_factory=list
    )  # back compat for previous name of 'data_tests'
121
+
122
+
123
@dataclass
class HasColumnDocs(dbtClassMixin):
    """Mixin adding a list of documented columns."""

    columns: Sequence[HasColumnProps] = field(default_factory=list)
126
+
127
+
128
@dataclass
class HasYamlMetadata(dbtClassMixin):
    """Location metadata for entries defined in a schema YAML file."""

    original_file_path: str
    # Top-level YAML key the entry appeared under (e.g. "models", "sources").
    yaml_key: str
    package_name: str

    @property
    def file_id(self):
        # Unique id of the defining file: "<package>://<path within package>".
        return f"{self.package_name}://{self.original_file_path}"
137
+
138
+
139
@dataclass
class HasConfig:
    """Mixin adding a free-form config dict."""

    config: Dict[str, Any] = field(default_factory=dict)
142
+
143
+
144
@dataclass
class UnparsedColumn(HasConfig, HasColumnAndTestProps):
    """A column entry as read from schema YAML, before parsing."""

    # Whether to quote the column name in generated SQL; None = adapter default.
    quote: Optional[bool] = None
    tags: List[str] = field(default_factory=list)
    granularity: Optional[str] = None  # str is really a TimeGranularity Enum
149
+
150
+
151
@dataclass
class HasColumnTests(dbtClassMixin):
    """Mixin adding unparsed columns (which may carry tests)."""

    columns: Sequence[UnparsedColumn] = field(default_factory=list)
154
+
155
+
156
@dataclass
class UnparsedVersion(dbtClassMixin):
    """One entry in a model's `versions:` list, before parsing.

    The `columns` list may mix a single include/exclude element (which column
    subset of the base model this version keeps) with per-version column
    overrides; __post_init__ splits the two apart.
    """

    v: NodeVersion
    defined_in: Optional[str] = None
    description: str = ""
    access: Optional[str] = None
    config: Dict[str, Any] = field(default_factory=dict)
    constraints: List[Dict[str, Any]] = field(default_factory=list)
    docs: Docs = field(default_factory=Docs)
    data_tests: Optional[List[TestDef]] = None
    tests: Optional[List[TestDef]] = None  # back compat for previous name of 'data_tests'
    columns: Sequence[Union[dbt_common.helper_types.IncludeExclude, UnparsedColumn]] = field(
        default_factory=list
    )
    deprecation_date: Optional[datetime.datetime] = None

    def __lt__(self, other):
        """Order versions numerically when both parse as floats, else
        lexicographically by string."""
        try:
            return float(self.v) < float(other.v)
        except ValueError:
            return str(self.v) < str(other.v)

    @property
    def include_exclude(self) -> dbt_common.helper_types.IncludeExclude:
        # The column include/exclude spec; defaults to include-all (set in
        # __post_init__).
        return self._include_exclude

    @property
    def unparsed_columns(self) -> List:
        # Column entries remaining after the include/exclude element is removed.
        return self._unparsed_columns

    @property
    def formatted_v(self) -> str:
        return f"v{self.v}"

    def __post_init__(self):
        # Partition `columns` into at most one IncludeExclude element and the
        # plain column overrides; more than one include/exclude is an error.
        has_include_exclude = False
        self._include_exclude = dbt_common.helper_types.IncludeExclude(include="*")
        self._unparsed_columns = []
        for column in self.columns:
            if isinstance(column, dbt_common.helper_types.IncludeExclude):
                if not has_include_exclude:
                    self._include_exclude = column
                    has_include_exclude = True
                else:
                    raise ParsingError("version can have at most one include/exclude element")
            else:
                self._unparsed_columns.append(column)

        # normalize_date is defined elsewhere in this module.
        self.deprecation_date = normalize_date(self.deprecation_date)
205
+
206
+
207
@dataclass
class UnparsedAnalysisUpdate(HasConfig, HasColumnDocs, HasColumnProps, HasYamlMetadata):
    """A YAML patch for an analysis node."""

    access: Optional[str] = None
210
+
211
+
212
@dataclass
class UnparsedSingularTestUpdate(HasConfig, HasColumnProps, HasYamlMetadata):
    """A YAML patch for a singular (one-off) test; no extra fields beyond
    the mixins."""

    pass
215
+
216
+
217
@dataclass
class UnparsedNodeUpdate(HasConfig, HasColumnTests, HasColumnAndTestProps, HasYamlMetadata):
    """A YAML patch for a generic executable node (seeds, snapshots, ...)."""

    quote_columns: Optional[bool] = None
    access: Optional[str] = None
221
+
222
+
223
@dataclass
class UnparsedModelUpdate(UnparsedNodeUpdate):
    """A YAML patch for a model, adding versioning, deprecation, and
    time-spine support on top of UnparsedNodeUpdate."""

    # NOTE(review): quote_columns/access re-declare fields already defined on
    # UnparsedNodeUpdate with the same defaults — redundant but harmless.
    quote_columns: Optional[bool] = None
    access: Optional[str] = None
    latest_version: Optional[NodeVersion] = None
    versions: Sequence[UnparsedVersion] = field(default_factory=list)
    deprecation_date: Optional[datetime.datetime] = None
    time_spine: Optional[TimeSpine] = None

    def __post_init__(self) -> None:
        """Validate versions / latest_version / time_spine consistency.

        Raises ParsingError on: latest_version not in versions, duplicate
        version identifiers, or a time-spine column that is missing or lacks
        a granularity.
        """
        if self.latest_version:
            version_values = [version.v for version in self.versions]
            if self.latest_version not in version_values:
                raise ParsingError(
                    f"latest_version: {self.latest_version} is not one of model '{self.name}' versions: {version_values} "
                )

        # Duplicate detection compares stringified versions, so e.g. 2 and "2"
        # collide intentionally.
        seen_versions = set()
        for version in self.versions:
            if str(version.v) in seen_versions:
                raise ParsingError(
                    f"Found duplicate version: '{version.v}' in versions list of model '{self.name}'"
                )
            seen_versions.add(str(version.v))

        self._version_map = {version.v: version for version in self.versions}

        # normalize_date is defined elsewhere in this module.
        self.deprecation_date = normalize_date(self.deprecation_date)

        if self.time_spine:
            # Validate time-spine columns against the latest version's columns
            # when versioned, otherwise against the model's own columns.
            columns = (
                self.get_columns_for_version(self.latest_version)
                if self.latest_version
                else self.columns
            )
            column_names_to_columns = {column.name: column for column in columns}
            if self.time_spine.standard_granularity_column not in column_names_to_columns:
                raise ParsingError(
                    f"Time spine standard granularity column must be defined on the model. Got invalid "
                    f"column name '{self.time_spine.standard_granularity_column}' for model '{self.name}'. Valid names"
                    f"{' for latest version' if self.latest_version else ''}: {list(column_names_to_columns.keys())}."
                )
            standard_column = column_names_to_columns[self.time_spine.standard_granularity_column]
            if not standard_column.granularity:
                raise ParsingError(
                    f"Time spine standard granularity column must have a granularity defined. Please add one for "
                    f"column '{self.time_spine.standard_granularity_column}' in model '{self.name}'."
                )
            # Each custom granularity maps to a column (explicit column_name or,
            # failing that, its own name); collect any that don't exist.
            custom_granularity_columns_not_found = []
            for custom_granularity in self.time_spine.custom_granularities:
                column_name = (
                    custom_granularity.column_name
                    if custom_granularity.column_name
                    else custom_granularity.name
                )
                if column_name not in column_names_to_columns:
                    custom_granularity_columns_not_found.append(column_name)
            if custom_granularity_columns_not_found:
                raise ParsingError(
                    "Time spine custom granularity columns do not exist in the model. "
                    f"Columns not found: {custom_granularity_columns_not_found}; "
                    f"Available columns: {list(column_names_to_columns.keys())}"
                )

    def get_columns_for_version(self, version: NodeVersion) -> List[UnparsedColumn]:
        """Return the effective column list for `version`: base columns that
        pass the version's include/exclude filter, plus the version's own
        column overrides. Raises DbtInternalError for an unknown version."""
        if version not in self._version_map:
            raise DbtInternalError(
                f"get_columns_for_version called for version '{version}' not in version map"
            )

        version_columns = []
        unparsed_version = self._version_map[version]
        for base_column in self.columns:
            if unparsed_version.include_exclude.includes(base_column.name):
                version_columns.append(base_column)

        for column in unparsed_version.unparsed_columns:
            version_columns.append(column)

        return version_columns

    def get_tests_for_version(self, version: NodeVersion) -> List[TestDef]:
        """Return the version's own data_tests if it defines any, otherwise
        the model-level data_tests. Raises DbtInternalError for an unknown
        version."""
        if version not in self._version_map:
            raise DbtInternalError(
                f"get_tests_for_version called for version '{version}' not in version map"
            )
        unparsed_version = self._version_map[version]
        return (
            unparsed_version.data_tests
            if unparsed_version.data_tests is not None
            else self.data_tests
        )
315
+
316
+
317
@dataclass
class UnparsedMacroUpdate(HasConfig, HasColumnProps, HasYamlMetadata):
    """A YAML patch for a macro, including its documented arguments."""

    arguments: List[MacroArgument] = field(default_factory=list)
320
+
321
+
322
@dataclass
class UnparsedSourceTableDefinition(HasColumnTests, HasColumnAndTestProps):
    """One table entry under a source definition in schema YAML."""

    config: Dict[str, Any] = field(default_factory=dict)
    loaded_at_field: Optional[str] = None
    loaded_at_field_present: Optional[bool] = None
    loaded_at_query: Optional[str] = None
    identifier: Optional[str] = None
    quoting: Quoting = field(default_factory=Quoting)
    freshness: Optional[FreshnessThreshold] = field(default_factory=FreshnessThreshold)
    external: Optional[ExternalTable] = None
    tags: List[str] = field(default_factory=list)

    def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None):
        dct = super().__post_serialize__(dct, context)
        # Preserve an explicitly-nulled freshness in the serialized output:
        # presumably the base serializer omits None values, and "freshness
        # disabled" must round-trip distinctly from "freshness unset" — TODO
        # confirm against the serializer.
        if "freshness" not in dct and self.freshness is None:
            dct["freshness"] = None
        return dct
339
+
340
+
341
@dataclass
class UnparsedSourceDefinition(dbtClassMixin):
    """A source definition as read from schema YAML, before parsing."""

    name: str
    description: str = ""
    meta: Dict[str, Any] = field(default_factory=dict)
    database: Optional[str] = None
    schema: Optional[str] = None
    loader: str = ""
    quoting: Quoting = field(default_factory=Quoting)
    freshness: Optional[FreshnessThreshold] = field(default_factory=FreshnessThreshold)
    loaded_at_field: Optional[str] = None
    loaded_at_field_present: Optional[bool] = None
    loaded_at_query: Optional[str] = None
    tables: List[UnparsedSourceTableDefinition] = field(default_factory=list)
    tags: List[str] = field(default_factory=list)
    config: Dict[str, Any] = field(default_factory=dict)
    # Pre-rendering (un-templated) values, kept for state comparison.
    unrendered_database: Optional[str] = None
    unrendered_schema: Optional[str] = None
    # DVT: Connection name from profiles.yml to use for this source
    connection: Optional[str] = None

    @classmethod
    def validate(cls, data):
        """Schema-validate `data`, additionally rejecting empty-string
        loaded_at_field at both the source and table level."""
        super(UnparsedSourceDefinition, cls).validate(data)

        if data.get("loaded_at_field", None) == "":
            raise ValidationError("loaded_at_field cannot be an empty string.")
        if "tables" in data:
            for table in data["tables"]:
                if table.get("loaded_at_field", None) == "":
                    raise ValidationError("loaded_at_field cannot be an empty string.")

    @property
    def yaml_key(self) -> "str":
        return "sources"

    def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None):
        dct = super().__post_serialize__(dct, context)
        # Keep an explicitly-nulled freshness in the serialized dict (matches
        # UnparsedSourceTableDefinition.__post_serialize__).
        if "freshness" not in dct and self.freshness is None:
            dct["freshness"] = None
        return dct
382
+
383
+
384
@dataclass
class SourceTablePatch(dbtClassMixin):
    """A patch (override) for a single table within a source definition.

    All fields except `name` are optional; only the provided ones are applied.
    """

    name: str
    description: Optional[str] = None
    meta: Optional[Dict[str, Any]] = None
    data_type: Optional[str] = None
    docs: Optional[Docs] = None
    loaded_at_field: Optional[str] = None
    loaded_at_field_present: Optional[bool] = None
    loaded_at_query: Optional[str] = None
    identifier: Optional[str] = None
    quoting: Quoting = field(default_factory=Quoting)
    freshness: Optional[FreshnessThreshold] = field(default_factory=FreshnessThreshold)
    external: Optional[ExternalTable] = None
    tags: Optional[List[str]] = None
    data_tests: Optional[List[TestDef]] = None
    tests: Optional[List[TestDef]] = None  # back compat for previous name of 'data_tests'
    columns: Optional[Sequence[UnparsedColumn]] = None

    def to_patch_dict(self) -> Dict[str, Any]:
        """Return this patch as a dict of only the fields to apply.

        Drops None values and the identifying `name` key, and preserves an
        explicitly-null freshness so "disabled" round-trips.
        """
        dct = self.to_dict(omit_none=True)
        # Bug fix: remove_keys was the bare string "name", so the loop
        # iterated its characters ('n', 'a', 'm', 'e') and the "name" key was
        # never removed. Use a one-element tuple, matching SourcePatch.
        remove_keys = ("name",)
        for key in remove_keys:
            if key in dct:
                del dct[key]

        if self.freshness is None:
            dct["freshness"] = None

        return dct
414
+
415
+
416
@dataclass
class SourcePatch(dbtClassMixin):
    """A patch (override) for an entire source definition, keyed by the
    source's name and the package (`overrides`) it comes from."""

    name: str = field(
        metadata=dict(description="The name of the source to override"),
    )
    overrides: str = field(
        metadata=dict(description="The package of the source to override"),
    )
    path: Path = field(
        metadata=dict(description="The path to the patch-defining yml file"),
    )
    config: Dict[str, Any] = field(default_factory=dict)
    description: Optional[str] = None
    meta: Optional[Dict[str, Any]] = None
    database: Optional[str] = None
    schema: Optional[str] = None
    loader: Optional[str] = None
    quoting: Optional[Quoting] = None
    # Annotation simplified from Optional[Optional[FreshnessThreshold]],
    # which is equivalent.
    freshness: Optional[FreshnessThreshold] = field(default_factory=FreshnessThreshold)
    loaded_at_field: Optional[str] = None
    loaded_at_field_present: Optional[bool] = None
    loaded_at_query: Optional[str] = None
    tables: Optional[List[SourceTablePatch]] = None
    tags: Optional[List[str]] = None

    def to_patch_dict(self) -> Dict[str, Any]:
        """Return this patch as a dict of only the fields to apply: None
        values and identifying/structural keys are dropped, and an
        explicitly-null freshness is preserved."""
        dct = self.to_dict(omit_none=True)
        remove_keys = ("name", "overrides", "tables", "path")
        for key in remove_keys:
            if key in dct:
                del dct[key]

        if self.freshness is None:
            dct["freshness"] = None

        return dct

    def get_table_named(self, name: str) -> Optional[SourceTablePatch]:
        """Return the table patch with the given name, or None."""
        if self.tables is not None:
            for table in self.tables:
                if table.name == name:
                    return table
        return None
459
+
460
+
461
@dataclass
class UnparsedDocumentation(dbtClassMixin):
    """An unparsed docs block: records the file it was defined in."""

    package_name: str
    path: str
    original_file_path: str

    @property
    def file_id(self):
        # Unique id of the defining file: "<package>://<path within package>".
        return f"{self.package_name}://{self.original_file_path}"

    @property
    def resource_type(self):
        return NodeType.Documentation
474
+
475
+
476
@dataclass
class UnparsedDocumentationFile(UnparsedDocumentation):
    """An unparsed docs block together with the raw contents of its file."""

    file_contents: str
479
+
480
+
481
# can't use total_ordering decorator here, as str provides an ordering already
# and it's not the one we want.
class Maturity(StrEnum):
    """Exposure maturity levels, ordered low < medium < high."""

    low = "low"
    medium = "medium"
    high = "high"

    def _rank(self) -> int:
        # Position of this member in the desired ordering.
        order = (Maturity.low, Maturity.medium, Maturity.high)
        return order.index(self)

    def __lt__(self, other):
        if not isinstance(other, Maturity):
            return NotImplemented
        return self._rank() < other._rank()

    def __gt__(self, other):
        if not isinstance(other, Maturity):
            return NotImplemented
        return self._rank() > other._rank()

    def __ge__(self, other):
        if not isinstance(other, Maturity):
            return NotImplemented
        return self._rank() >= other._rank()

    def __le__(self, other):
        if not isinstance(other, Maturity):
            return NotImplemented
        return self._rank() <= other._rank()
508
+
509
+
510
@dataclass
class UnparsedExposure(dbtClassMixin):
    """An exposure definition as read from schema YAML, before parsing."""

    name: str
    type: ExposureType
    owner: Owner
    description: str = ""
    label: Optional[str] = None
    maturity: Optional[MaturityType] = None
    meta: Dict[str, Any] = field(default_factory=dict)
    tags: List[str] = field(default_factory=list)
    url: Optional[str] = None
    depends_on: List[str] = field(default_factory=list)
    config: Dict[str, Any] = field(default_factory=dict)

    @classmethod
    def validate(cls, data):
        """Schema-validate `data`; warn (deprecation) on non-conforming names
        and require the owner to have a name or an email."""
        super(UnparsedExposure, cls).validate(data)
        if "name" in data:
            # name may only contain word characters (alphanumerics and
            # underscores) and hyphens; anything else triggers a deprecation
            # warning rather than a hard error
            if not (re.match(r"[\w-]+$", data["name"])):
                deprecations.warn("exposure-name", exposure=data["name"])

        if data["owner"].get("name") is None and data["owner"].get("email") is None:
            raise ValidationError("Exposure owner must have at least one of 'name' or 'email'.")
534
+
535
+
536
@dataclass
class MetricFilter(dbtClassMixin):
    """A single field/operator/value filter on a metric."""

    field: str
    operator: str
    # TODO : Can we make this Any?
    value: str
542
+
543
+
544
class MetricTimePeriod(StrEnum):
    """Calendar period used for metric time windows."""

    day = "day"
    week = "week"
    month = "month"
    year = "year"

    def plural(self) -> str:
        # e.g. "day" -> "days", as used when rendering window strings.
        return str(self) + "s"
552
+
553
+
554
@dataclass
class MetricTime(dbtClassMixin, Mergeable):
    """A metric time window: a count of periods (e.g. 7 days)."""

    count: Optional[int] = None
    period: Optional[MetricTimePeriod] = None

    def __bool__(self):
        """Truthy only when both count and period have been supplied."""
        return not (self.count is None or self.period is None)
561
+
562
+
563
@dataclass
class UnparsedMetricInputMeasure(dbtClassMixin):
    """An unparsed reference to a measure used as input to a metric."""

    name: str
    # Note: `Union` must be the outermost part of the type annotation for serialization to work properly.
    filter: Union[str, List[str], None] = None
    alias: Optional[str] = None
    join_to_timespine: bool = False
    fill_nulls_with: Optional[int] = None
571
+
572
+
573
@dataclass
class UnparsedMetricInput(dbtClassMixin):
    """An unparsed reference to another metric used as input to a metric."""

    name: str
    # Note: `Union` must be the outermost part of the type annotation for serialization to work properly.
    filter: Union[str, List[str], None] = None
    alias: Optional[str] = None
    offset_window: Optional[str] = None
    offset_to_grain: Optional[str] = None
581
+
582
+
583
@dataclass
class UnparsedConversionTypeParams(dbtClassMixin):
    """Parameters for a conversion metric: base and conversion measures
    joined on an entity."""

    base_measure: Union[UnparsedMetricInputMeasure, str]
    conversion_measure: Union[UnparsedMetricInputMeasure, str]
    entity: str
    calculation: str = (
        ConversionCalculationType.CONVERSION_RATE.value
    )  # ConversionCalculationType Enum
    window: Optional[str] = None
    constant_properties: Optional[List[ConstantPropertyInput]] = None
593
+
594
+
595
@dataclass
class UnparsedCumulativeTypeParams(dbtClassMixin):
    """Type parameters for a cumulative metric."""

    window: Optional[str] = None
    grain_to_date: Optional[str] = None
    # default comes from the PeriodAggregation enum
    period_agg: str = PeriodAggregation.FIRST.value
600
+
601
+
602
@dataclass
class UnparsedMetricTypeParams(dbtClassMixin):
    """Union of the per-flavor metric parameters as read from YAML; which
    fields are meaningful depends on the metric's ``type``."""

    measure: Optional[Union[UnparsedMetricInputMeasure, str]] = None
    numerator: Optional[Union[UnparsedMetricInput, str]] = None
    denominator: Optional[Union[UnparsedMetricInput, str]] = None
    expr: Optional[Union[str, bool]] = None
    window: Optional[str] = None
    grain_to_date: Optional[str] = None  # str is really a TimeGranularity Enum
    metrics: Optional[List[Union[UnparsedMetricInput, str]]] = None
    conversion_type_params: Optional[UnparsedConversionTypeParams] = None
    cumulative_type_params: Optional[UnparsedCumulativeTypeParams] = None
613
+
614
+
615
@dataclass
class UnparsedMetric(dbtClassMixin):
    """A metric definition as read from YAML, prior to parsing.

    ``type`` selects the metric flavor and ``type_params`` carries the
    flavor-specific inputs.
    """

    name: str
    label: str
    type: str
    type_params: UnparsedMetricTypeParams
    description: str = ""
    # Note: `Union` must be the outermost part of the type annotation for serialization to work properly.
    filter: Union[str, List[str], None] = None
    time_granularity: Optional[str] = None
    # metadata: Optional[Unparsedetadata] = None # TODO
    meta: Dict[str, Any] = field(default_factory=dict)
    tags: List[str] = field(default_factory=list)
    config: Dict[str, Any] = field(default_factory=dict)

    @classmethod
    def validate(cls, data):
        """Validate the raw dict; raise ParsingError when the name is invalid."""
        super(UnparsedMetric, cls).validate(data)
        if "name" in data:
            errors = []
            if " " in data["name"]:
                errors.append("cannot contain spaces")
            # This handles failing queries due to too long metric names.
            # It only occurs in BigQuery and Snowflake (Postgres/Redshift truncate)
            if len(data["name"]) > 250:
                errors.append("cannot contain more than 250 characters")
            if not re.match(r"^[A-Za-z]", data["name"]):
                errors.append("must begin with a letter")
            # NOTE(review): this pattern also accepts hyphens, which the error
            # message does not mention -- confirm whether '-' is intentional.
            if not re.match(r"[\w-]+$", data["name"]):
                errors.append("must contain only letters, numbers and underscores")

            if errors:
                raise ParsingError(
                    f"The metric name '{data['name']}' is invalid. It {', '.join(errors)}"
                )
650
+
651
+
652
@dataclass
class UnparsedGroup(dbtClassMixin):
    """A group definition as read from YAML; its owner must be identifiable."""

    name: str
    owner: Owner
    description: Optional[str] = None
    config: Dict[str, Any] = field(default_factory=dict)

    @classmethod
    def validate(cls, data):
        """Require the group's owner to carry at least one of 'name' or 'email'."""
        super(UnparsedGroup, cls).validate(data)
        owner = data["owner"]
        if owner.get("name") is None and owner.get("email") is None:
            raise ValidationError("Group owner must have at least one of 'name' or 'email'.")
664
+
665
+
666
@dataclass
class UnparsedFunctionReturns(dbtClassMixin):
    """Wrapper carrying a function's declared return specification."""

    returns: FunctionReturns
669
+
670
+
671
@dataclass
class UnparsedFunctionUpdate(HasConfig, HasColumnProps, HasYamlMetadata, UnparsedFunctionReturns):
    """A YAML update entry for a user-defined function."""

    # presumably an access-level name (e.g. AccessType value) -- confirm against the parser
    access: Optional[str] = None
    arguments: List[FunctionArgument] = field(default_factory=list)
675
+
676
+
677
+ #
678
+ # semantic interfaces unparsed objects
679
+ #
680
+
681
+
682
@dataclass
class UnparsedEntity(dbtClassMixin):
    """An entity declaration within a semantic model, as read from YAML."""

    name: str
    type: str  # EntityType enum
    description: Optional[str] = None
    label: Optional[str] = None
    role: Optional[str] = None
    expr: Optional[str] = None
    config: Dict[str, Any] = field(default_factory=dict)
691
+
692
+
693
@dataclass
class UnparsedNonAdditiveDimension(dbtClassMixin):
    """Non-additive dimension settings for a measure, as read from YAML."""

    name: str
    window_choice: str  # AggregationType enum
    window_groupings: List[str] = field(default_factory=list)
698
+
699
+
700
@dataclass
class UnparsedMeasure(dbtClassMixin):
    """A measure declaration within a semantic model, as read from YAML."""

    name: str
    agg: str  # actually an enum
    description: Optional[str] = None
    label: Optional[str] = None
    expr: Optional[Union[str, bool, int]] = None
    agg_params: Optional[MeasureAggregationParameters] = None
    non_additive_dimension: Optional[UnparsedNonAdditiveDimension] = None
    agg_time_dimension: Optional[str] = None
    # when True, a metric is auto-created from this measure
    create_metric: bool = False
    config: Dict[str, Any] = field(default_factory=dict)
712
+
713
+
714
@dataclass
class UnparsedDimensionTypeParams(dbtClassMixin):
    """Type parameters for a time dimension, as read from YAML."""

    time_granularity: str  # TimeGranularity enum
    validity_params: Optional[DimensionValidityParams] = None
718
+
719
+
720
@dataclass
class UnparsedDimension(dbtClassMixin):
    """A dimension declaration within a semantic model, as read from YAML."""

    name: str
    type: str  # actually an enum
    description: Optional[str] = None
    label: Optional[str] = None
    is_partition: bool = False
    type_params: Optional[UnparsedDimensionTypeParams] = None
    expr: Optional[str] = None
    config: Dict[str, Any] = field(default_factory=dict)
730
+
731
+
732
@dataclass
class UnparsedSemanticModel(dbtClassMixin):
    """A semantic model definition as read from YAML: a model reference plus
    its entities, measures, and dimensions."""

    name: str
    model: str  # looks like "ref(...)"
    config: Dict[str, Any] = field(default_factory=dict)
    description: Optional[str] = None
    label: Optional[str] = None
    defaults: Optional[Defaults] = None
    entities: List[UnparsedEntity] = field(default_factory=list)
    measures: List[UnparsedMeasure] = field(default_factory=list)
    dimensions: List[UnparsedDimension] = field(default_factory=list)
    primary_entity: Optional[str] = None
744
+
745
+
746
@dataclass
class UnparsedQueryParams(dbtClassMixin):
    """Query parameters for a saved query, as read from YAML."""

    metrics: List[str] = field(default_factory=list)
    group_by: List[str] = field(default_factory=list)
    # Note: `Union` must be the outermost part of the type annotation for serialization to work properly.
    where: Union[str, List[str], None] = None
    order_by: List[str] = field(default_factory=list)
    limit: Optional[int] = None
754
+
755
+
756
@dataclass
class UnparsedExport(dbtClassMixin):
    """Configuration for writing query results to a table."""

    name: str
    config: Dict[str, Any] = field(default_factory=dict)
762
+
763
+
764
@dataclass
class UnparsedSavedQuery(dbtClassMixin):
    """A saved query definition as read from YAML: query params plus optional
    exports of the results."""

    name: str
    query_params: UnparsedQueryParams
    description: Optional[str] = None
    label: Optional[str] = None
    exports: List[UnparsedExport] = field(default_factory=list)
    config: Dict[str, Any] = field(default_factory=dict)
    # Note: the order of the types is critical; it's the order that they will be checked against inputs.
    # if reversed, a single-string tag like `tag: "good"` becomes ['g','o','o','d']
    tags: Union[str, List[str]] = field(
        default_factory=list_str,
        metadata=metas(ShowBehavior.Hide, MergeBehavior.Append, CompareBehavior.Exclude),
    )
778
+
779
+
780
def normalize_date(d: Optional[datetime.date]) -> Optional[datetime.datetime]:
    """Convert a date to a datetime at midnight, attaching the local time
    zone when the value is naive.

    Returns None for None; an already-aware datetime passes through unchanged.
    """
    if d is None:
        return None

    if isinstance(d, datetime.datetime):
        dt = d
    else:
        # plain date: promote to a datetime at midnight
        dt = datetime.datetime(d.year, d.month, d.day)

    # naive values are re-interpreted in the system time zone
    return dt if dt.tzinfo else dt.astimezone()
793
+
794
+
795
@dataclass
class UnparsedUnitTest(dbtClassMixin):
    """A unit test definition as read from YAML: fixtures for the model's
    inputs and the expected output."""

    name: str
    model: str  # name of the model being unit tested
    given: Sequence[UnitTestInputFixture]
    expect: UnitTestOutputFixture
    description: str = ""
    overrides: Optional[UnitTestOverrides] = None
    config: Dict[str, Any] = field(default_factory=dict)
    versions: Optional[UnitTestNodeVersions] = None

    @classmethod
    def validate(cls, data):
        # `versions` may specify `include` or `exclude`, but not both at once.
        super(UnparsedUnitTest, cls).validate(data)
        if data.get("versions", None):
            if data["versions"].get("include") and data["versions"].get("exclude"):
                raise ValidationError("Unit tests can not both include and exclude versions.")