dvt-core 1.11.0b4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of dvt-core might be problematic. Click here for more details.
- dvt/__init__.py +7 -0
- dvt/_pydantic_shim.py +26 -0
- dvt/adapters/__init__.py +16 -0
- dvt/adapters/multi_adapter_manager.py +268 -0
- dvt/artifacts/__init__.py +0 -0
- dvt/artifacts/exceptions/__init__.py +1 -0
- dvt/artifacts/exceptions/schemas.py +31 -0
- dvt/artifacts/resources/__init__.py +116 -0
- dvt/artifacts/resources/base.py +68 -0
- dvt/artifacts/resources/types.py +93 -0
- dvt/artifacts/resources/v1/analysis.py +10 -0
- dvt/artifacts/resources/v1/catalog.py +23 -0
- dvt/artifacts/resources/v1/components.py +275 -0
- dvt/artifacts/resources/v1/config.py +282 -0
- dvt/artifacts/resources/v1/documentation.py +11 -0
- dvt/artifacts/resources/v1/exposure.py +52 -0
- dvt/artifacts/resources/v1/function.py +53 -0
- dvt/artifacts/resources/v1/generic_test.py +32 -0
- dvt/artifacts/resources/v1/group.py +22 -0
- dvt/artifacts/resources/v1/hook.py +11 -0
- dvt/artifacts/resources/v1/macro.py +30 -0
- dvt/artifacts/resources/v1/metric.py +173 -0
- dvt/artifacts/resources/v1/model.py +146 -0
- dvt/artifacts/resources/v1/owner.py +10 -0
- dvt/artifacts/resources/v1/saved_query.py +112 -0
- dvt/artifacts/resources/v1/seed.py +42 -0
- dvt/artifacts/resources/v1/semantic_layer_components.py +72 -0
- dvt/artifacts/resources/v1/semantic_model.py +315 -0
- dvt/artifacts/resources/v1/singular_test.py +14 -0
- dvt/artifacts/resources/v1/snapshot.py +92 -0
- dvt/artifacts/resources/v1/source_definition.py +85 -0
- dvt/artifacts/resources/v1/sql_operation.py +10 -0
- dvt/artifacts/resources/v1/unit_test_definition.py +78 -0
- dvt/artifacts/schemas/__init__.py +0 -0
- dvt/artifacts/schemas/base.py +191 -0
- dvt/artifacts/schemas/batch_results.py +24 -0
- dvt/artifacts/schemas/catalog/__init__.py +12 -0
- dvt/artifacts/schemas/catalog/v1/__init__.py +0 -0
- dvt/artifacts/schemas/catalog/v1/catalog.py +60 -0
- dvt/artifacts/schemas/freshness/__init__.py +1 -0
- dvt/artifacts/schemas/freshness/v3/__init__.py +0 -0
- dvt/artifacts/schemas/freshness/v3/freshness.py +159 -0
- dvt/artifacts/schemas/manifest/__init__.py +2 -0
- dvt/artifacts/schemas/manifest/v12/__init__.py +0 -0
- dvt/artifacts/schemas/manifest/v12/manifest.py +212 -0
- dvt/artifacts/schemas/results.py +148 -0
- dvt/artifacts/schemas/run/__init__.py +2 -0
- dvt/artifacts/schemas/run/v5/__init__.py +0 -0
- dvt/artifacts/schemas/run/v5/run.py +184 -0
- dvt/artifacts/schemas/upgrades/__init__.py +4 -0
- dvt/artifacts/schemas/upgrades/upgrade_manifest.py +174 -0
- dvt/artifacts/schemas/upgrades/upgrade_manifest_dbt_version.py +2 -0
- dvt/artifacts/utils/validation.py +153 -0
- dvt/cli/__init__.py +1 -0
- dvt/cli/context.py +16 -0
- dvt/cli/exceptions.py +56 -0
- dvt/cli/flags.py +558 -0
- dvt/cli/main.py +971 -0
- dvt/cli/option_types.py +121 -0
- dvt/cli/options.py +79 -0
- dvt/cli/params.py +803 -0
- dvt/cli/requires.py +478 -0
- dvt/cli/resolvers.py +32 -0
- dvt/cli/types.py +40 -0
- dvt/clients/__init__.py +0 -0
- dvt/clients/checked_load.py +82 -0
- dvt/clients/git.py +164 -0
- dvt/clients/jinja.py +206 -0
- dvt/clients/jinja_static.py +245 -0
- dvt/clients/registry.py +192 -0
- dvt/clients/yaml_helper.py +68 -0
- dvt/compilation.py +833 -0
- dvt/compute/__init__.py +26 -0
- dvt/compute/base.py +288 -0
- dvt/compute/engines/__init__.py +13 -0
- dvt/compute/engines/duckdb_engine.py +368 -0
- dvt/compute/engines/spark_engine.py +273 -0
- dvt/compute/query_analyzer.py +212 -0
- dvt/compute/router.py +483 -0
- dvt/config/__init__.py +4 -0
- dvt/config/catalogs.py +95 -0
- dvt/config/compute_config.py +406 -0
- dvt/config/profile.py +411 -0
- dvt/config/profiles_v2.py +464 -0
- dvt/config/project.py +893 -0
- dvt/config/renderer.py +232 -0
- dvt/config/runtime.py +491 -0
- dvt/config/selectors.py +209 -0
- dvt/config/utils.py +78 -0
- dvt/connectors/.gitignore +6 -0
- dvt/connectors/README.md +306 -0
- dvt/connectors/catalog.yml +217 -0
- dvt/connectors/download_connectors.py +300 -0
- dvt/constants.py +29 -0
- dvt/context/__init__.py +0 -0
- dvt/context/base.py +746 -0
- dvt/context/configured.py +136 -0
- dvt/context/context_config.py +350 -0
- dvt/context/docs.py +82 -0
- dvt/context/exceptions_jinja.py +179 -0
- dvt/context/macro_resolver.py +195 -0
- dvt/context/macros.py +171 -0
- dvt/context/manifest.py +73 -0
- dvt/context/providers.py +2198 -0
- dvt/context/query_header.py +14 -0
- dvt/context/secret.py +59 -0
- dvt/context/target.py +74 -0
- dvt/contracts/__init__.py +0 -0
- dvt/contracts/files.py +413 -0
- dvt/contracts/graph/__init__.py +0 -0
- dvt/contracts/graph/manifest.py +1904 -0
- dvt/contracts/graph/metrics.py +98 -0
- dvt/contracts/graph/model_config.py +71 -0
- dvt/contracts/graph/node_args.py +42 -0
- dvt/contracts/graph/nodes.py +1806 -0
- dvt/contracts/graph/semantic_manifest.py +233 -0
- dvt/contracts/graph/unparsed.py +812 -0
- dvt/contracts/project.py +417 -0
- dvt/contracts/results.py +53 -0
- dvt/contracts/selection.py +23 -0
- dvt/contracts/sql.py +86 -0
- dvt/contracts/state.py +69 -0
- dvt/contracts/util.py +46 -0
- dvt/deprecations.py +347 -0
- dvt/deps/__init__.py +0 -0
- dvt/deps/base.py +153 -0
- dvt/deps/git.py +196 -0
- dvt/deps/local.py +80 -0
- dvt/deps/registry.py +131 -0
- dvt/deps/resolver.py +149 -0
- dvt/deps/tarball.py +121 -0
- dvt/docs/source/_ext/dbt_click.py +118 -0
- dvt/docs/source/conf.py +32 -0
- dvt/env_vars.py +64 -0
- dvt/event_time/event_time.py +40 -0
- dvt/event_time/sample_window.py +60 -0
- dvt/events/__init__.py +16 -0
- dvt/events/base_types.py +37 -0
- dvt/events/core_types_pb2.py +2 -0
- dvt/events/logging.py +109 -0
- dvt/events/types.py +2534 -0
- dvt/exceptions.py +1487 -0
- dvt/flags.py +89 -0
- dvt/graph/__init__.py +11 -0
- dvt/graph/cli.py +248 -0
- dvt/graph/graph.py +172 -0
- dvt/graph/queue.py +213 -0
- dvt/graph/selector.py +375 -0
- dvt/graph/selector_methods.py +976 -0
- dvt/graph/selector_spec.py +223 -0
- dvt/graph/thread_pool.py +18 -0
- dvt/hooks.py +21 -0
- dvt/include/README.md +49 -0
- dvt/include/__init__.py +3 -0
- dvt/include/global_project.py +4 -0
- dvt/include/starter_project/.gitignore +4 -0
- dvt/include/starter_project/README.md +15 -0
- dvt/include/starter_project/__init__.py +3 -0
- dvt/include/starter_project/analyses/.gitkeep +0 -0
- dvt/include/starter_project/dvt_project.yml +36 -0
- dvt/include/starter_project/macros/.gitkeep +0 -0
- dvt/include/starter_project/models/example/my_first_dbt_model.sql +27 -0
- dvt/include/starter_project/models/example/my_second_dbt_model.sql +6 -0
- dvt/include/starter_project/models/example/schema.yml +21 -0
- dvt/include/starter_project/seeds/.gitkeep +0 -0
- dvt/include/starter_project/snapshots/.gitkeep +0 -0
- dvt/include/starter_project/tests/.gitkeep +0 -0
- dvt/internal_deprecations.py +27 -0
- dvt/jsonschemas/__init__.py +3 -0
- dvt/jsonschemas/jsonschemas.py +309 -0
- dvt/jsonschemas/project/0.0.110.json +4717 -0
- dvt/jsonschemas/project/0.0.85.json +2015 -0
- dvt/jsonschemas/resources/0.0.110.json +2636 -0
- dvt/jsonschemas/resources/0.0.85.json +2536 -0
- dvt/jsonschemas/resources/latest.json +6773 -0
- dvt/links.py +4 -0
- dvt/materializations/__init__.py +0 -0
- dvt/materializations/incremental/__init__.py +0 -0
- dvt/materializations/incremental/microbatch.py +235 -0
- dvt/mp_context.py +8 -0
- dvt/node_types.py +37 -0
- dvt/parser/__init__.py +23 -0
- dvt/parser/analysis.py +21 -0
- dvt/parser/base.py +549 -0
- dvt/parser/common.py +267 -0
- dvt/parser/docs.py +52 -0
- dvt/parser/fixtures.py +51 -0
- dvt/parser/functions.py +30 -0
- dvt/parser/generic_test.py +100 -0
- dvt/parser/generic_test_builders.py +334 -0
- dvt/parser/hooks.py +119 -0
- dvt/parser/macros.py +137 -0
- dvt/parser/manifest.py +2204 -0
- dvt/parser/models.py +574 -0
- dvt/parser/partial.py +1179 -0
- dvt/parser/read_files.py +445 -0
- dvt/parser/schema_generic_tests.py +423 -0
- dvt/parser/schema_renderer.py +111 -0
- dvt/parser/schema_yaml_readers.py +936 -0
- dvt/parser/schemas.py +1467 -0
- dvt/parser/search.py +149 -0
- dvt/parser/seeds.py +28 -0
- dvt/parser/singular_test.py +20 -0
- dvt/parser/snapshots.py +44 -0
- dvt/parser/sources.py +557 -0
- dvt/parser/sql.py +63 -0
- dvt/parser/unit_tests.py +622 -0
- dvt/plugins/__init__.py +20 -0
- dvt/plugins/contracts.py +10 -0
- dvt/plugins/exceptions.py +2 -0
- dvt/plugins/manager.py +164 -0
- dvt/plugins/manifest.py +21 -0
- dvt/profiler.py +20 -0
- dvt/py.typed +1 -0
- dvt/runners/__init__.py +2 -0
- dvt/runners/exposure_runner.py +7 -0
- dvt/runners/no_op_runner.py +46 -0
- dvt/runners/saved_query_runner.py +7 -0
- dvt/selected_resources.py +8 -0
- dvt/task/__init__.py +0 -0
- dvt/task/base.py +504 -0
- dvt/task/build.py +197 -0
- dvt/task/clean.py +57 -0
- dvt/task/clone.py +162 -0
- dvt/task/compile.py +151 -0
- dvt/task/compute.py +366 -0
- dvt/task/debug.py +650 -0
- dvt/task/deps.py +280 -0
- dvt/task/docs/__init__.py +3 -0
- dvt/task/docs/generate.py +408 -0
- dvt/task/docs/index.html +250 -0
- dvt/task/docs/serve.py +28 -0
- dvt/task/freshness.py +323 -0
- dvt/task/function.py +122 -0
- dvt/task/group_lookup.py +46 -0
- dvt/task/init.py +374 -0
- dvt/task/list.py +237 -0
- dvt/task/printer.py +176 -0
- dvt/task/profiles.py +256 -0
- dvt/task/retry.py +175 -0
- dvt/task/run.py +1146 -0
- dvt/task/run_operation.py +142 -0
- dvt/task/runnable.py +802 -0
- dvt/task/seed.py +104 -0
- dvt/task/show.py +150 -0
- dvt/task/snapshot.py +57 -0
- dvt/task/sql.py +111 -0
- dvt/task/test.py +464 -0
- dvt/tests/fixtures/__init__.py +1 -0
- dvt/tests/fixtures/project.py +620 -0
- dvt/tests/util.py +651 -0
- dvt/tracking.py +529 -0
- dvt/utils/__init__.py +3 -0
- dvt/utils/artifact_upload.py +151 -0
- dvt/utils/utils.py +408 -0
- dvt/version.py +249 -0
- dvt_core-1.11.0b4.dist-info/METADATA +252 -0
- dvt_core-1.11.0b4.dist-info/RECORD +261 -0
- dvt_core-1.11.0b4.dist-info/WHEEL +5 -0
- dvt_core-1.11.0b4.dist-info/entry_points.txt +2 -0
- dvt_core-1.11.0b4.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,812 @@
|
|
|
1
|
+
import datetime
|
|
2
|
+
import re
|
|
3
|
+
from dataclasses import dataclass, field
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
from typing import Any, Dict, List, Literal, Optional, Sequence, Union
|
|
6
|
+
|
|
7
|
+
from dvt import deprecations
|
|
8
|
+
from dvt.artifacts.resources import (
|
|
9
|
+
ConstantPropertyInput,
|
|
10
|
+
Defaults,
|
|
11
|
+
DimensionValidityParams,
|
|
12
|
+
Docs,
|
|
13
|
+
ExposureType,
|
|
14
|
+
ExternalTable,
|
|
15
|
+
FreshnessThreshold,
|
|
16
|
+
FunctionArgument,
|
|
17
|
+
FunctionReturns,
|
|
18
|
+
MacroArgument,
|
|
19
|
+
MaturityType,
|
|
20
|
+
MeasureAggregationParameters,
|
|
21
|
+
NodeVersion,
|
|
22
|
+
Owner,
|
|
23
|
+
Quoting,
|
|
24
|
+
TimeSpine,
|
|
25
|
+
UnitTestInputFixture,
|
|
26
|
+
UnitTestNodeVersions,
|
|
27
|
+
UnitTestOutputFixture,
|
|
28
|
+
UnitTestOverrides,
|
|
29
|
+
list_str,
|
|
30
|
+
metas,
|
|
31
|
+
)
|
|
32
|
+
from dvt.exceptions import ParsingError
|
|
33
|
+
from dvt.node_types import NodeType
|
|
34
|
+
|
|
35
|
+
# trigger the PathEncoder
|
|
36
|
+
import dbt_common.helper_types # noqa:F401
|
|
37
|
+
from dbt_common.contracts.config.base import CompareBehavior, MergeBehavior
|
|
38
|
+
from dbt_common.contracts.config.metadata import ShowBehavior
|
|
39
|
+
from dbt_common.contracts.config.properties import AdditionalPropertiesMixin
|
|
40
|
+
from dbt_common.contracts.util import Mergeable
|
|
41
|
+
from dbt_common.dataclass_schema import (
|
|
42
|
+
ExtensibleDbtClassMixin,
|
|
43
|
+
StrEnum,
|
|
44
|
+
ValidationError,
|
|
45
|
+
dbtClassMixin,
|
|
46
|
+
)
|
|
47
|
+
from dbt_common.exceptions import DbtInternalError
|
|
48
|
+
from dbt_semantic_interfaces.type_enums import (
|
|
49
|
+
ConversionCalculationType,
|
|
50
|
+
PeriodAggregation,
|
|
51
|
+
)
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
@dataclass
class UnparsedBaseNode(dbtClassMixin):
    """Base for unparsed nodes: records where the node was found on disk."""

    package_name: str
    path: str
    original_file_path: str

    @property
    def file_id(self):
        # Unique file identifier of the form "<package>://<original path>".
        return f"{self.package_name}://{self.original_file_path}"
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
@dataclass
class HasCode(dbtClassMixin):
    """Mixin for unparsed nodes that carry raw source code."""

    raw_code: str
    language: str

    @property
    def empty(self):
        """True when the raw code is blank or whitespace-only."""
        stripped = self.raw_code.strip()
        return len(stripped) == 0
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
@dataclass
class UnparsedMacro(UnparsedBaseNode, HasCode):
    """An unparsed macro: file location plus its raw code."""

    resource_type: Literal[NodeType.Macro]
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
@dataclass
class UnparsedGenericTest(UnparsedBaseNode, HasCode):
    """An unparsed generic test definition.

    Shares ``NodeType.Macro`` with :class:`UnparsedMacro` — generic tests
    are declared with macro-like syntax.
    """

    resource_type: Literal[NodeType.Macro]
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
@dataclass
class UnparsedNode(UnparsedBaseNode, HasCode):
    """An unparsed named node (model, test, seed, ...) with raw code."""

    name: str
    resource_type: NodeType

    @property
    def search_name(self):
        # Nodes are looked up by their bare name.
        return self.name
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
@dataclass
class UnparsedRunHook(UnparsedNode):
    """An unparsed run hook (a project-level operation)."""

    resource_type: Literal[NodeType.Operation]
    # Position of this hook within its hook list, when defined as part of a
    # sequence of hooks; None otherwise.
    index: Optional[int] = None
|
|
99
|
+
|
|
100
|
+
|
|
101
|
+
@dataclass
class HasColumnProps(AdditionalPropertiesMixin, ExtensibleDbtClassMixin):
    """Named-and-documented properties shared by columns and column-like YAML entries."""

    name: str
    description: str = ""
    meta: Dict[str, Any] = field(default_factory=dict)
    data_type: Optional[str] = None
    constraints: List[Dict[str, Any]] = field(default_factory=list)
    docs: Docs = field(default_factory=Docs)
    config: Dict[str, Any] = field(default_factory=dict)
    # Catch-all for keys not declared above (see AdditionalPropertiesMixin).
    _extra: Dict[str, Any] = field(default_factory=dict)
|
|
111
|
+
|
|
112
|
+
|
|
113
|
+
# A test entry as written in YAML: either a bare test name (str) or a
# mapping carrying the test's name and arguments.
TestDef = Union[Dict[str, Any], str]
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
@dataclass
class HasColumnAndTestProps(HasColumnProps):
    """Column properties plus attached test definitions."""

    data_tests: List[TestDef] = field(default_factory=list)
    tests: List[TestDef] = field(
        default_factory=list
    )  # back compat for previous name of 'data_tests'
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
@dataclass
class HasColumnDocs(dbtClassMixin):
    """Mixin adding a list of documented columns."""

    columns: Sequence[HasColumnProps] = field(default_factory=list)
|
|
127
|
+
|
|
128
|
+
|
|
129
|
+
@dataclass
class HasYamlMetadata(dbtClassMixin):
    """Records where a YAML-defined entry came from."""

    original_file_path: str
    # Top-level YAML key the entry was found under (e.g. "sources").
    yaml_key: str
    package_name: str

    @property
    def file_id(self):
        # Unique file identifier of the form "<package>://<original path>".
        return f"{self.package_name}://{self.original_file_path}"
|
|
138
|
+
|
|
139
|
+
|
|
140
|
+
@dataclass
class HasConfig:
    """Mixin adding a free-form config dictionary."""

    config: Dict[str, Any] = field(default_factory=dict)
|
|
143
|
+
|
|
144
|
+
|
|
145
|
+
@dataclass
class UnparsedColumn(HasConfig, HasColumnAndTestProps):
    """A column entry as written in schema YAML."""

    quote: Optional[bool] = None
    tags: List[str] = field(default_factory=list)
    granularity: Optional[str] = None  # str is really a TimeGranularity Enum
|
|
150
|
+
|
|
151
|
+
|
|
152
|
+
@dataclass
class HasColumnTests(dbtClassMixin):
    """Mixin adding columns that may carry test definitions."""

    columns: Sequence[UnparsedColumn] = field(default_factory=list)
|
|
155
|
+
|
|
156
|
+
|
|
157
|
+
@dataclass
class UnparsedVersion(dbtClassMixin):
    """A single entry in a versioned model's ``versions`` list.

    ``columns`` may mix at most one ``IncludeExclude`` spec (which of the
    base model's columns this version keeps) with per-version column
    definitions; ``__post_init__`` splits the two into ``_include_exclude``
    and ``_unparsed_columns``.
    """

    v: NodeVersion
    defined_in: Optional[str] = None
    description: str = ""
    access: Optional[str] = None
    config: Dict[str, Any] = field(default_factory=dict)
    constraints: List[Dict[str, Any]] = field(default_factory=list)
    docs: Docs = field(default_factory=Docs)
    data_tests: Optional[List[TestDef]] = None
    tests: Optional[List[TestDef]] = None  # back compat for previous name of 'data_tests'
    columns: Sequence[Union[dbt_common.helper_types.IncludeExclude, UnparsedColumn]] = field(
        default_factory=list
    )
    deprecation_date: Optional[datetime.datetime] = None

    def __lt__(self, other):
        """Order versions numerically when both parse as floats, else lexically."""
        try:
            return float(self.v) < float(other.v)
        except ValueError:
            return str(self.v) < str(other.v)

    @property
    def include_exclude(self) -> dbt_common.helper_types.IncludeExclude:
        # Column filter extracted from `columns` in __post_init__.
        return self._include_exclude

    @property
    def unparsed_columns(self) -> List:
        # Version-specific column definitions extracted in __post_init__.
        return self._unparsed_columns

    @property
    def formatted_v(self) -> str:
        return f"v{self.v}"

    def __post_init__(self):
        has_include_exclude = False
        # Default: keep all base columns unless an IncludeExclude says otherwise.
        self._include_exclude = dbt_common.helper_types.IncludeExclude(include="*")
        self._unparsed_columns = []
        for column in self.columns:
            if isinstance(column, dbt_common.helper_types.IncludeExclude):
                if not has_include_exclude:
                    self._include_exclude = column
                    has_include_exclude = True
                else:
                    raise ParsingError("version can have at most one include/exclude element")
            else:
                self._unparsed_columns.append(column)

        # normalize_date is defined elsewhere in this module.
        self.deprecation_date = normalize_date(self.deprecation_date)
|
|
206
|
+
|
|
207
|
+
|
|
208
|
+
@dataclass
class UnparsedAnalysisUpdate(HasConfig, HasColumnDocs, HasColumnProps, HasYamlMetadata):
    """A YAML patch for an analysis node."""

    access: Optional[str] = None
|
|
211
|
+
|
|
212
|
+
|
|
213
|
+
@dataclass
class UnparsedSingularTestUpdate(HasConfig, HasColumnProps, HasYamlMetadata):
    """A YAML patch for a singular (one-off) test."""

    pass
|
|
216
|
+
|
|
217
|
+
|
|
218
|
+
@dataclass
class UnparsedNodeUpdate(HasConfig, HasColumnTests, HasColumnAndTestProps, HasYamlMetadata):
    """A YAML patch applied to an existing node (columns, tests, config)."""

    quote_columns: Optional[bool] = None
    access: Optional[str] = None
|
|
222
|
+
|
|
223
|
+
|
|
224
|
+
@dataclass
class UnparsedModelUpdate(UnparsedNodeUpdate):
    """A YAML patch for a model, adding versioning and time-spine support."""

    quote_columns: Optional[bool] = None
    access: Optional[str] = None
    latest_version: Optional[NodeVersion] = None
    versions: Sequence[UnparsedVersion] = field(default_factory=list)
    deprecation_date: Optional[datetime.datetime] = None
    time_spine: Optional[TimeSpine] = None

    def __post_init__(self) -> None:
        # Validate version declarations and (if present) the time-spine
        # configuration; raises ParsingError on any inconsistency.
        if self.latest_version:
            version_values = [version.v for version in self.versions]
            if self.latest_version not in version_values:
                raise ParsingError(
                    f"latest_version: {self.latest_version} is not one of model '{self.name}' versions: {version_values} "
                )

        # Reject duplicate versions (compared by string form).
        seen_versions = set()
        for version in self.versions:
            if str(version.v) in seen_versions:
                raise ParsingError(
                    f"Found duplicate version: '{version.v}' in versions list of model '{self.name}'"
                )
            seen_versions.add(str(version.v))

        self._version_map = {version.v: version for version in self.versions}

        # normalize_date is defined elsewhere in this module.
        self.deprecation_date = normalize_date(self.deprecation_date)

        if self.time_spine:
            # For versioned models, validate against the latest version's
            # effective column set; otherwise against the base columns.
            columns = (
                self.get_columns_for_version(self.latest_version)
                if self.latest_version
                else self.columns
            )
            column_names_to_columns = {column.name: column for column in columns}
            if self.time_spine.standard_granularity_column not in column_names_to_columns:
                raise ParsingError(
                    f"Time spine standard granularity column must be defined on the model. Got invalid "
                    f"column name '{self.time_spine.standard_granularity_column}' for model '{self.name}'. Valid names"
                    f"{' for latest version' if self.latest_version else ''}: {list(column_names_to_columns.keys())}."
                )
            standard_column = column_names_to_columns[self.time_spine.standard_granularity_column]
            if not standard_column.granularity:
                raise ParsingError(
                    f"Time spine standard granularity column must have a granularity defined. Please add one for "
                    f"column '{self.time_spine.standard_granularity_column}' in model '{self.name}'."
                )
            custom_granularity_columns_not_found = []
            for custom_granularity in self.time_spine.custom_granularities:
                # A custom granularity may name a column explicitly; otherwise
                # the granularity's own name doubles as the column name.
                column_name = (
                    custom_granularity.column_name
                    if custom_granularity.column_name
                    else custom_granularity.name
                )
                if column_name not in column_names_to_columns:
                    custom_granularity_columns_not_found.append(column_name)
            if custom_granularity_columns_not_found:
                raise ParsingError(
                    "Time spine custom granularity columns do not exist in the model. "
                    f"Columns not found: {custom_granularity_columns_not_found}; "
                    f"Available columns: {list(column_names_to_columns.keys())}"
                )

    def get_columns_for_version(self, version: NodeVersion) -> List[UnparsedColumn]:
        """Return the effective columns for ``version``: base columns passing
        the version's include/exclude filter, plus the version's own columns.

        Raises DbtInternalError for an unknown version.
        """
        if version not in self._version_map:
            raise DbtInternalError(
                f"get_columns_for_version called for version '{version}' not in version map"
            )

        version_columns = []
        unparsed_version = self._version_map[version]
        for base_column in self.columns:
            if unparsed_version.include_exclude.includes(base_column.name):
                version_columns.append(base_column)

        for column in unparsed_version.unparsed_columns:
            version_columns.append(column)

        return version_columns

    def get_tests_for_version(self, version: NodeVersion) -> List[TestDef]:
        """Return ``version``'s own data_tests, falling back to the model's.

        Raises DbtInternalError for an unknown version.
        """
        if version not in self._version_map:
            raise DbtInternalError(
                f"get_tests_for_version called for version '{version}' not in version map"
            )
        unparsed_version = self._version_map[version]
        return (
            unparsed_version.data_tests
            if unparsed_version.data_tests is not None
            else self.data_tests
        )
|
|
316
|
+
|
|
317
|
+
|
|
318
|
+
@dataclass
class UnparsedMacroUpdate(HasConfig, HasColumnProps, HasYamlMetadata):
    """A YAML patch for a macro (description, meta, arguments)."""

    arguments: List[MacroArgument] = field(default_factory=list)
|
|
321
|
+
|
|
322
|
+
|
|
323
|
+
@dataclass
class UnparsedSourceTableDefinition(HasColumnTests, HasColumnAndTestProps):
    """A table entry inside an unparsed source definition."""

    config: Dict[str, Any] = field(default_factory=dict)
    loaded_at_field: Optional[str] = None
    loaded_at_field_present: Optional[bool] = None
    loaded_at_query: Optional[str] = None
    identifier: Optional[str] = None
    quoting: Quoting = field(default_factory=Quoting)
    freshness: Optional[FreshnessThreshold] = field(default_factory=FreshnessThreshold)
    external: Optional[ExternalTable] = None
    tags: List[str] = field(default_factory=list)

    def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None):
        # Preserve an explicit freshness=None in the serialized dict, which
        # omit-none serialization would otherwise drop.
        dct = super().__post_serialize__(dct, context)
        if "freshness" not in dct and self.freshness is None:
            dct["freshness"] = None
        return dct
|
|
340
|
+
|
|
341
|
+
|
|
342
|
+
@dataclass
class UnparsedSourceDefinition(dbtClassMixin):
    """A source as written in schema YAML, before parsing."""

    name: str
    description: str = ""
    meta: Dict[str, Any] = field(default_factory=dict)
    database: Optional[str] = None
    schema: Optional[str] = None
    loader: str = ""
    quoting: Quoting = field(default_factory=Quoting)
    freshness: Optional[FreshnessThreshold] = field(default_factory=FreshnessThreshold)
    loaded_at_field: Optional[str] = None
    loaded_at_field_present: Optional[bool] = None
    loaded_at_query: Optional[str] = None
    tables: List[UnparsedSourceTableDefinition] = field(default_factory=list)
    tags: List[str] = field(default_factory=list)
    config: Dict[str, Any] = field(default_factory=dict)
    unrendered_database: Optional[str] = None
    unrendered_schema: Optional[str] = None
    # DVT-specific: profile reference for multi-source support
    profile: Optional[str] = None

    @classmethod
    def validate(cls, data):
        """Validate raw source data.

        Rejects an empty-string ``loaded_at_field`` on the source itself and
        on any of its tables (None/absent is allowed).
        """
        super(UnparsedSourceDefinition, cls).validate(data)

        if data.get("loaded_at_field", None) == "":
            raise ValidationError("loaded_at_field cannot be an empty string.")
        if "tables" in data:
            for table in data["tables"]:
                if table.get("loaded_at_field", None) == "":
                    raise ValidationError("loaded_at_field cannot be an empty string.")

    @property
    def yaml_key(self) -> "str":
        # Sources always live under the "sources" key in schema YAML.
        return "sources"

    def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None):
        # Preserve an explicit freshness=None in the serialized dict, which
        # omit-none serialization would otherwise drop.
        dct = super().__post_serialize__(dct, context)
        if "freshness" not in dct and self.freshness is None:
            dct["freshness"] = None
        return dct
|
|
383
|
+
|
|
384
|
+
|
|
385
|
+
@dataclass
class SourceTablePatch(dbtClassMixin):
    """Table-level overrides supplied by a source patch.

    All fields other than ``name`` are optional; only the fields actually
    set are applied to the target table via :meth:`to_patch_dict`.
    """

    name: str
    description: Optional[str] = None
    meta: Optional[Dict[str, Any]] = None
    data_type: Optional[str] = None
    docs: Optional[Docs] = None
    loaded_at_field: Optional[str] = None
    loaded_at_field_present: Optional[bool] = None
    loaded_at_query: Optional[str] = None
    identifier: Optional[str] = None
    quoting: Quoting = field(default_factory=Quoting)
    freshness: Optional[FreshnessThreshold] = field(default_factory=FreshnessThreshold)
    external: Optional[ExternalTable] = None
    tags: Optional[List[str]] = None
    data_tests: Optional[List[TestDef]] = None
    tests: Optional[List[TestDef]] = None  # back compat for previous name of 'data_tests'
    columns: Optional[Sequence[UnparsedColumn]] = None

    def to_patch_dict(self) -> Dict[str, Any]:
        """Return the set fields as a dict of overrides, without ``name``.

        An explicitly-None freshness is re-inserted (``omit_none`` drops it)
        so the patch can clear freshness on the target table.
        """
        dct = self.to_dict(omit_none=True)
        # BUG FIX: this was `remove_keys = "name"`, which iterates the
        # *characters* "n", "a", "m", "e" and therefore never removed the
        # "name" key. Use a tuple, matching SourcePatch.to_patch_dict.
        remove_keys = ("name",)
        for key in remove_keys:
            if key in dct:
                del dct[key]

        if self.freshness is None:
            dct["freshness"] = None

        return dct
|
|
415
|
+
|
|
416
|
+
|
|
417
|
+
@dataclass
class SourcePatch(dbtClassMixin):
    """A source override from a patch YAML file.

    Identifies the target source by ``name``/``overrides`` and carries the
    fields to override; :meth:`to_patch_dict` yields just those values.
    """

    name: str = field(
        metadata=dict(description="The name of the source to override"),
    )
    overrides: str = field(
        metadata=dict(description="The package of the source to override"),
    )
    path: Path = field(
        metadata=dict(description="The path to the patch-defining yml file"),
    )
    config: Dict[str, Any] = field(default_factory=dict)
    description: Optional[str] = None
    meta: Optional[Dict[str, Any]] = None
    database: Optional[str] = None
    schema: Optional[str] = None
    loader: Optional[str] = None
    quoting: Optional[Quoting] = None
    # Annotation simplified from the redundant Optional[Optional[FreshnessThreshold]].
    freshness: Optional[FreshnessThreshold] = field(default_factory=FreshnessThreshold)
    loaded_at_field: Optional[str] = None
    loaded_at_field_present: Optional[bool] = None
    loaded_at_query: Optional[str] = None
    tables: Optional[List[SourceTablePatch]] = None
    tags: Optional[List[str]] = None

    def to_patch_dict(self) -> Dict[str, Any]:
        """Return the overridden fields as a dict, dropping identity keys.

        An explicitly-None freshness is re-inserted (``omit_none`` drops it).
        """
        dct = self.to_dict(omit_none=True)
        remove_keys = ("name", "overrides", "tables", "path")
        for key in remove_keys:
            if key in dct:
                del dct[key]

        if self.freshness is None:
            dct["freshness"] = None

        return dct

    def get_table_named(self, name: str) -> Optional[SourceTablePatch]:
        """Return the table patch with the given name, or None if absent."""
        if self.tables is not None:
            for table in self.tables:
                if table.name == name:
                    return table
        return None
|
|
460
|
+
|
|
461
|
+
|
|
462
|
+
@dataclass
class UnparsedDocumentation(dbtClassMixin):
    """A docs entry's on-disk location, prior to parsing."""

    package_name: str
    path: str
    original_file_path: str

    @property
    def file_id(self):
        # Unique file identifier of the form "<package>://<original path>".
        return f"{self.package_name}://{self.original_file_path}"

    @property
    def resource_type(self):
        return NodeType.Documentation
|
|
475
|
+
|
|
476
|
+
|
|
477
|
+
@dataclass
class UnparsedDocumentationFile(UnparsedDocumentation):
    """An unparsed docs entry together with its raw file contents."""

    file_contents: str
|
|
480
|
+
|
|
481
|
+
|
|
482
|
+
# can't use total_ordering decorator here, as str provides an ordering already
|
|
483
|
+
# and it's not the one we want.
|
|
484
|
+
class Maturity(StrEnum):
    """Maturity level, totally ordered as low < medium < high."""

    low = "low"
    medium = "medium"
    high = "high"

    def _rank(self) -> int:
        # Position of this member in the ascending maturity order.
        return (Maturity.low, Maturity.medium, Maturity.high).index(self)

    def __lt__(self, other):
        if not isinstance(other, Maturity):
            return NotImplemented
        return self._rank() < other._rank()

    def __gt__(self, other):
        if not isinstance(other, Maturity):
            return NotImplemented
        return self._rank() > other._rank()

    def __ge__(self, other):
        if not isinstance(other, Maturity):
            return NotImplemented
        return self._rank() >= other._rank()

    def __le__(self, other):
        if not isinstance(other, Maturity):
            return NotImplemented
        return self._rank() <= other._rank()
|
|
509
|
+
|
|
510
|
+
|
|
511
|
+
@dataclass
class UnparsedExposure(dbtClassMixin):
    """An exposure as written in YAML, before parsing."""

    name: str
    type: ExposureType
    owner: Owner
    description: str = ""
    label: Optional[str] = None
    maturity: Optional[MaturityType] = None
    meta: Dict[str, Any] = field(default_factory=dict)
    tags: List[str] = field(default_factory=list)
    url: Optional[str] = None
    depends_on: List[str] = field(default_factory=list)
    config: Dict[str, Any] = field(default_factory=dict)

    @classmethod
    def validate(cls, data):
        """Validate raw exposure data.

        Emits a deprecation warning for names containing characters other
        than word characters and hyphens, and requires the owner to have at
        least a name or an email.
        """
        super(UnparsedExposure, cls).validate(data)
        if "name" in data:
            # name can only contain alphanumeric chars and underscores
            if not (re.match(r"[\w-]+$", data["name"])):
                deprecations.warn("exposure-name", exposure=data["name"])

        if data["owner"].get("name") is None and data["owner"].get("email") is None:
            raise ValidationError("Exposure owner must have at least one of 'name' or 'email'.")
|
|
535
|
+
|
|
536
|
+
|
|
537
|
+
@dataclass
class MetricFilter(dbtClassMixin):
    """A single field/operator/value filter on a metric."""

    field: str
    operator: str
    # TODO : Can we make this Any?
    value: str
|
|
543
|
+
|
|
544
|
+
|
|
545
|
+
class MetricTimePeriod(StrEnum):
    """Calendar grains usable as a metric time period."""

    day = "day"
    week = "week"
    month = "month"
    year = "year"

    def plural(self) -> str:
        """Return the pluralized period name, e.g. 'day' -> 'days'."""
        # %s goes through str(self), same as the concatenation form.
        return "%ss" % self
|
|
553
|
+
|
|
554
|
+
|
|
555
|
+
@dataclass
class MetricTime(dbtClassMixin, Mergeable):
    """A metric time window; truthy only when fully specified."""

    count: Optional[int] = None
    period: Optional[MetricTimePeriod] = None

    def __bool__(self):
        # Falsy unless both count and period were provided.
        return not (self.count is None or self.period is None)
|
|
562
|
+
|
|
563
|
+
|
|
564
|
+
@dataclass
class UnparsedMetricInputMeasure(dbtClassMixin):
    """Unparsed measure reference (with optional tweaks) used by a metric."""

    name: str
    # Note: `Union` must be the outermost part of the type annotation for serialization to work properly.
    filter: Union[str, List[str], None] = None
    alias: Optional[str] = None
    join_to_timespine: bool = False
    fill_nulls_with: Optional[int] = None
|
|
572
|
+
|
|
573
|
+
|
|
574
|
+
@dataclass
class UnparsedMetricInput(dbtClassMixin):
    """Unparsed reference to another metric used as an input."""

    name: str
    # Note: `Union` must be the outermost part of the type annotation for serialization to work properly.
    filter: Union[str, List[str], None] = None
    alias: Optional[str] = None
    offset_window: Optional[str] = None
    offset_to_grain: Optional[str] = None
|
|
582
|
+
|
|
583
|
+
|
|
584
|
+
@dataclass
class UnparsedConversionTypeParams(dbtClassMixin):
    """Unparsed type parameters specific to conversion metrics."""

    base_measure: Union[UnparsedMetricInputMeasure, str]
    conversion_measure: Union[UnparsedMetricInputMeasure, str]
    entity: str
    calculation: str = (
        ConversionCalculationType.CONVERSION_RATE.value
    )  # ConversionCalculationType Enum
    window: Optional[str] = None
    constant_properties: Optional[List[ConstantPropertyInput]] = None
|
|
594
|
+
|
|
595
|
+
|
|
596
|
+
@dataclass
class UnparsedCumulativeTypeParams(dbtClassMixin):
    """Unparsed type parameters specific to cumulative metrics."""

    window: Optional[str] = None
    grain_to_date: Optional[str] = None
    # Defaults to the 'first' period aggregation.
    period_agg: str = PeriodAggregation.FIRST.value
|
|
601
|
+
|
|
602
|
+
|
|
603
|
+
@dataclass
class UnparsedMetricTypeParams(dbtClassMixin):
    """Unparsed type parameters for a metric; which fields apply depends on
    the metric type (simple, ratio, derived, conversion, cumulative)."""

    measure: Optional[Union[UnparsedMetricInputMeasure, str]] = None
    numerator: Optional[Union[UnparsedMetricInput, str]] = None
    denominator: Optional[Union[UnparsedMetricInput, str]] = None
    expr: Optional[Union[str, bool]] = None
    window: Optional[str] = None
    grain_to_date: Optional[str] = None  # str is really a TimeGranularity Enum
    metrics: Optional[List[Union[UnparsedMetricInput, str]]] = None
    conversion_type_params: Optional[UnparsedConversionTypeParams] = None
    cumulative_type_params: Optional[UnparsedCumulativeTypeParams] = None
|
|
614
|
+
|
|
615
|
+
|
|
616
|
+
@dataclass
class UnparsedMetric(dbtClassMixin):
    """Unparsed (schema-file) representation of a metric."""

    name: str
    label: str
    type: str
    type_params: UnparsedMetricTypeParams
    description: str = ""
    # Note: `Union` must be the outermost part of the type annotation for serialization to work properly.
    filter: Union[str, List[str], None] = None
    time_granularity: Optional[str] = None
    # metadata: Optional[Unparsedetadata] = None # TODO
    meta: Dict[str, Any] = field(default_factory=dict)
    tags: List[str] = field(default_factory=list)
    config: Dict[str, Any] = field(default_factory=dict)

    @classmethod
    def validate(cls, data):
        """Schema-validate, then enforce metric-name rules.

        Raises:
            ParsingError: listing every naming rule the metric name violates.
        """
        super(UnparsedMetric, cls).validate(data)
        if "name" not in data:
            return

        name = data["name"]
        errors = []
        if " " in name:
            errors.append("cannot contain spaces")
        # This handles failing queries due to too long metric names.
        # It only occurs in BigQuery and Snowflake (Postgres/Redshift truncate)
        if len(name) > 250:
            errors.append("cannot contain more than 250 characters")
        if re.match(r"^[A-Za-z]", name) is None:
            errors.append("must begin with a letter")
        # NOTE(review): the regex also permits dashes, which the message
        # below does not mention — confirm which behavior is intended.
        if re.match(r"[\w-]+$", name) is None:
            errors.append("must contain only letters, numbers and underscores")

        if errors:
            # join(errors) directly: the original generator wrapper was redundant.
            raise ParsingError(
                f"The metric name '{name}' is invalid. It {', '.join(errors)}"
            )
|
|
651
|
+
|
|
652
|
+
|
|
653
|
+
@dataclass
class UnparsedGroup(dbtClassMixin):
    """Unparsed (schema-file) representation of a group."""

    name: str
    owner: Owner
    description: Optional[str] = None
    config: Dict[str, Any] = field(default_factory=dict)

    @classmethod
    def validate(cls, data):
        """Schema-validate, then require a contactable owner."""
        super(UnparsedGroup, cls).validate(data)
        owner = data["owner"]
        if owner.get("name") is None and owner.get("email") is None:
            raise ValidationError("Group owner must have at least one of 'name' or 'email'.")
|
|
665
|
+
|
|
666
|
+
|
|
667
|
+
@dataclass
class UnparsedFunctionReturns(dbtClassMixin):
    """Mixin carrying the declared return type of a function."""

    returns: FunctionReturns
|
|
670
|
+
|
|
671
|
+
|
|
672
|
+
@dataclass
class UnparsedFunctionUpdate(HasConfig, HasColumnProps, HasYamlMetadata, UnparsedFunctionReturns):
    """Unparsed yaml update for a function: config/columns/metadata via
    mixins, plus access level and declared arguments."""

    access: Optional[str] = None
    arguments: List[FunctionArgument] = field(default_factory=list)
|
|
676
|
+
|
|
677
|
+
|
|
678
|
+
#
|
|
679
|
+
# semantic interfaces unparsed objects
|
|
680
|
+
#
|
|
681
|
+
|
|
682
|
+
|
|
683
|
+
@dataclass
class UnparsedEntity(dbtClassMixin):
    """Unparsed entity declaration within a semantic model."""

    name: str
    type: str  # EntityType enum
    description: Optional[str] = None
    label: Optional[str] = None
    role: Optional[str] = None
    expr: Optional[str] = None
    config: Dict[str, Any] = field(default_factory=dict)
|
|
692
|
+
|
|
693
|
+
|
|
694
|
+
@dataclass
class UnparsedNonAdditiveDimension(dbtClassMixin):
    """Unparsed non-additive dimension settings for a measure."""

    name: str
    window_choice: str  # AggregationType enum
    window_groupings: List[str] = field(default_factory=list)
|
|
699
|
+
|
|
700
|
+
|
|
701
|
+
@dataclass
class UnparsedMeasure(dbtClassMixin):
    """Unparsed measure declaration within a semantic model."""

    name: str
    agg: str  # actually an enum
    description: Optional[str] = None
    label: Optional[str] = None
    expr: Optional[Union[str, bool, int]] = None
    agg_params: Optional[MeasureAggregationParameters] = None
    non_additive_dimension: Optional[UnparsedNonAdditiveDimension] = None
    agg_time_dimension: Optional[str] = None
    # When True, a metric is auto-created from this measure.
    create_metric: bool = False
    config: Dict[str, Any] = field(default_factory=dict)
|
|
713
|
+
|
|
714
|
+
|
|
715
|
+
@dataclass
class UnparsedDimensionTypeParams(dbtClassMixin):
    """Unparsed type parameters for a time dimension."""

    time_granularity: str  # TimeGranularity enum
    validity_params: Optional[DimensionValidityParams] = None
|
|
719
|
+
|
|
720
|
+
|
|
721
|
+
@dataclass
class UnparsedDimension(dbtClassMixin):
    """Unparsed dimension declaration within a semantic model."""

    name: str
    type: str  # actually an enum
    description: Optional[str] = None
    label: Optional[str] = None
    is_partition: bool = False
    type_params: Optional[UnparsedDimensionTypeParams] = None
    expr: Optional[str] = None
    config: Dict[str, Any] = field(default_factory=dict)
|
|
731
|
+
|
|
732
|
+
|
|
733
|
+
@dataclass
class UnparsedSemanticModel(dbtClassMixin):
    """Unparsed (schema-file) representation of a semantic model."""

    name: str
    model: str  # looks like "ref(...)"
    config: Dict[str, Any] = field(default_factory=dict)
    description: Optional[str] = None
    label: Optional[str] = None
    defaults: Optional[Defaults] = None
    entities: List[UnparsedEntity] = field(default_factory=list)
    measures: List[UnparsedMeasure] = field(default_factory=list)
    dimensions: List[UnparsedDimension] = field(default_factory=list)
    primary_entity: Optional[str] = None
|
|
745
|
+
|
|
746
|
+
|
|
747
|
+
@dataclass
class UnparsedQueryParams(dbtClassMixin):
    """Unparsed query parameters of a saved query."""

    metrics: List[str] = field(default_factory=list)
    group_by: List[str] = field(default_factory=list)
    # Note: `Union` must be the outermost part of the type annotation for serialization to work properly.
    where: Union[str, List[str], None] = None
    order_by: List[str] = field(default_factory=list)
    limit: Optional[int] = None
|
|
755
|
+
|
|
756
|
+
|
|
757
|
+
@dataclass
class UnparsedExport(dbtClassMixin):
    """Configuration for writing query results to a table."""

    name: str
    config: Dict[str, Any] = field(default_factory=dict)
|
|
763
|
+
|
|
764
|
+
|
|
765
|
+
@dataclass
class UnparsedSavedQuery(dbtClassMixin):
    """Unparsed (schema-file) representation of a saved query."""

    name: str
    query_params: UnparsedQueryParams
    description: Optional[str] = None
    label: Optional[str] = None
    exports: List[UnparsedExport] = field(default_factory=list)
    config: Dict[str, Any] = field(default_factory=dict)
    # Note: the order of the types is critical; it's the order that they will be checked against inputs.
    # if reversed, a single-string tag like `tag: "good"` becomes ['g','o','o','d']
    tags: Union[str, List[str]] = field(
        default_factory=list_str,
        metadata=metas(ShowBehavior.Hide, MergeBehavior.Append, CompareBehavior.Exclude),
    )
|
|
779
|
+
|
|
780
|
+
|
|
781
|
+
def normalize_date(d: Optional[datetime.date]) -> Optional[datetime.datetime]:
|
|
782
|
+
"""Convert date to datetime (at midnight), and add local time zone if naive"""
|
|
783
|
+
if d is None:
|
|
784
|
+
return None
|
|
785
|
+
|
|
786
|
+
# convert date to datetime
|
|
787
|
+
dt = d if type(d) == datetime.datetime else datetime.datetime(d.year, d.month, d.day)
|
|
788
|
+
|
|
789
|
+
if not dt.tzinfo:
|
|
790
|
+
# date is naive, re-interpret as system time zone
|
|
791
|
+
dt = dt.astimezone()
|
|
792
|
+
|
|
793
|
+
return dt
|
|
794
|
+
|
|
795
|
+
|
|
796
|
+
@dataclass
class UnparsedUnitTest(dbtClassMixin):
    """Unparsed (schema-file) representation of a unit test."""

    name: str
    model: str  # name of the model being unit tested
    given: Sequence[UnitTestInputFixture]
    expect: UnitTestOutputFixture
    description: str = ""
    overrides: Optional[UnitTestOverrides] = None
    config: Dict[str, Any] = field(default_factory=dict)
    versions: Optional[UnitTestNodeVersions] = None

    @classmethod
    def validate(cls, data):
        """Schema-validate, then reject contradictory version selectors."""
        super(UnparsedUnitTest, cls).validate(data)
        versions = data.get("versions")
        if versions and versions.get("include") and versions.get("exclude"):
            raise ValidationError("Unit tests can not both include and exclude versions.")
|