dvt-core 0.52.2__cp310-cp310-macosx_10_9_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of dvt-core might be problematic; see the registry's advisory page for more details.
- dbt/__init__.py +7 -0
- dbt/_pydantic_shim.py +26 -0
- dbt/artifacts/__init__.py +0 -0
- dbt/artifacts/exceptions/__init__.py +1 -0
- dbt/artifacts/exceptions/schemas.py +31 -0
- dbt/artifacts/resources/__init__.py +116 -0
- dbt/artifacts/resources/base.py +67 -0
- dbt/artifacts/resources/types.py +93 -0
- dbt/artifacts/resources/v1/analysis.py +10 -0
- dbt/artifacts/resources/v1/catalog.py +23 -0
- dbt/artifacts/resources/v1/components.py +274 -0
- dbt/artifacts/resources/v1/config.py +277 -0
- dbt/artifacts/resources/v1/documentation.py +11 -0
- dbt/artifacts/resources/v1/exposure.py +51 -0
- dbt/artifacts/resources/v1/function.py +52 -0
- dbt/artifacts/resources/v1/generic_test.py +31 -0
- dbt/artifacts/resources/v1/group.py +21 -0
- dbt/artifacts/resources/v1/hook.py +11 -0
- dbt/artifacts/resources/v1/macro.py +29 -0
- dbt/artifacts/resources/v1/metric.py +172 -0
- dbt/artifacts/resources/v1/model.py +145 -0
- dbt/artifacts/resources/v1/owner.py +10 -0
- dbt/artifacts/resources/v1/saved_query.py +111 -0
- dbt/artifacts/resources/v1/seed.py +41 -0
- dbt/artifacts/resources/v1/semantic_layer_components.py +72 -0
- dbt/artifacts/resources/v1/semantic_model.py +314 -0
- dbt/artifacts/resources/v1/singular_test.py +14 -0
- dbt/artifacts/resources/v1/snapshot.py +91 -0
- dbt/artifacts/resources/v1/source_definition.py +84 -0
- dbt/artifacts/resources/v1/sql_operation.py +10 -0
- dbt/artifacts/resources/v1/unit_test_definition.py +77 -0
- dbt/artifacts/schemas/__init__.py +0 -0
- dbt/artifacts/schemas/base.py +191 -0
- dbt/artifacts/schemas/batch_results.py +24 -0
- dbt/artifacts/schemas/catalog/__init__.py +11 -0
- dbt/artifacts/schemas/catalog/v1/__init__.py +0 -0
- dbt/artifacts/schemas/catalog/v1/catalog.py +59 -0
- dbt/artifacts/schemas/freshness/__init__.py +1 -0
- dbt/artifacts/schemas/freshness/v3/__init__.py +0 -0
- dbt/artifacts/schemas/freshness/v3/freshness.py +158 -0
- dbt/artifacts/schemas/manifest/__init__.py +2 -0
- dbt/artifacts/schemas/manifest/v12/__init__.py +0 -0
- dbt/artifacts/schemas/manifest/v12/manifest.py +211 -0
- dbt/artifacts/schemas/results.py +147 -0
- dbt/artifacts/schemas/run/__init__.py +2 -0
- dbt/artifacts/schemas/run/v5/__init__.py +0 -0
- dbt/artifacts/schemas/run/v5/run.py +184 -0
- dbt/artifacts/schemas/upgrades/__init__.py +4 -0
- dbt/artifacts/schemas/upgrades/upgrade_manifest.py +174 -0
- dbt/artifacts/schemas/upgrades/upgrade_manifest_dbt_version.py +2 -0
- dbt/artifacts/utils/validation.py +153 -0
- dbt/cli/__init__.py +1 -0
- dbt/cli/context.py +17 -0
- dbt/cli/exceptions.py +57 -0
- dbt/cli/flags.py +560 -0
- dbt/cli/main.py +2039 -0
- dbt/cli/option_types.py +121 -0
- dbt/cli/options.py +80 -0
- dbt/cli/params.py +804 -0
- dbt/cli/requires.py +490 -0
- dbt/cli/resolvers.py +50 -0
- dbt/cli/types.py +40 -0
- dbt/clients/__init__.py +0 -0
- dbt/clients/checked_load.py +83 -0
- dbt/clients/git.py +164 -0
- dbt/clients/jinja.py +206 -0
- dbt/clients/jinja_static.py +245 -0
- dbt/clients/registry.py +192 -0
- dbt/clients/yaml_helper.py +68 -0
- dbt/compilation.py +876 -0
- dbt/compute/__init__.py +14 -0
- dbt/compute/engines/__init__.py +12 -0
- dbt/compute/engines/spark_engine.py +624 -0
- dbt/compute/federated_executor.py +837 -0
- dbt/compute/filter_pushdown.cpython-310-darwin.so +0 -0
- dbt/compute/filter_pushdown.py +273 -0
- dbt/compute/jar_provisioning.cpython-310-darwin.so +0 -0
- dbt/compute/jar_provisioning.py +255 -0
- dbt/compute/java_compat.cpython-310-darwin.so +0 -0
- dbt/compute/java_compat.py +689 -0
- dbt/compute/jdbc_utils.cpython-310-darwin.so +0 -0
- dbt/compute/jdbc_utils.py +678 -0
- dbt/compute/smart_selector.cpython-310-darwin.so +0 -0
- dbt/compute/smart_selector.py +311 -0
- dbt/compute/strategies/__init__.py +54 -0
- dbt/compute/strategies/base.py +165 -0
- dbt/compute/strategies/dataproc.py +207 -0
- dbt/compute/strategies/emr.py +203 -0
- dbt/compute/strategies/local.py +364 -0
- dbt/compute/strategies/standalone.py +262 -0
- dbt/config/__init__.py +4 -0
- dbt/config/catalogs.py +94 -0
- dbt/config/compute.cpython-310-darwin.so +0 -0
- dbt/config/compute.py +547 -0
- dbt/config/dvt_profile.cpython-310-darwin.so +0 -0
- dbt/config/dvt_profile.py +342 -0
- dbt/config/profile.py +422 -0
- dbt/config/project.py +873 -0
- dbt/config/project_utils.py +28 -0
- dbt/config/renderer.py +231 -0
- dbt/config/runtime.py +553 -0
- dbt/config/selectors.py +208 -0
- dbt/config/utils.py +77 -0
- dbt/constants.py +28 -0
- dbt/context/__init__.py +0 -0
- dbt/context/base.py +745 -0
- dbt/context/configured.py +135 -0
- dbt/context/context_config.py +382 -0
- dbt/context/docs.py +82 -0
- dbt/context/exceptions_jinja.py +178 -0
- dbt/context/macro_resolver.py +195 -0
- dbt/context/macros.py +171 -0
- dbt/context/manifest.py +72 -0
- dbt/context/providers.py +2249 -0
- dbt/context/query_header.py +13 -0
- dbt/context/secret.py +58 -0
- dbt/context/target.py +74 -0
- dbt/contracts/__init__.py +0 -0
- dbt/contracts/files.py +413 -0
- dbt/contracts/graph/__init__.py +0 -0
- dbt/contracts/graph/manifest.py +1904 -0
- dbt/contracts/graph/metrics.py +97 -0
- dbt/contracts/graph/model_config.py +70 -0
- dbt/contracts/graph/node_args.py +42 -0
- dbt/contracts/graph/nodes.py +1806 -0
- dbt/contracts/graph/semantic_manifest.py +232 -0
- dbt/contracts/graph/unparsed.py +811 -0
- dbt/contracts/project.py +417 -0
- dbt/contracts/results.py +53 -0
- dbt/contracts/selection.py +23 -0
- dbt/contracts/sql.py +85 -0
- dbt/contracts/state.py +68 -0
- dbt/contracts/util.py +46 -0
- dbt/deprecations.py +346 -0
- dbt/deps/__init__.py +0 -0
- dbt/deps/base.py +152 -0
- dbt/deps/git.py +195 -0
- dbt/deps/local.py +79 -0
- dbt/deps/registry.py +130 -0
- dbt/deps/resolver.py +149 -0
- dbt/deps/tarball.py +120 -0
- dbt/docs/source/_ext/dbt_click.py +119 -0
- dbt/docs/source/conf.py +32 -0
- dbt/env_vars.py +64 -0
- dbt/event_time/event_time.py +40 -0
- dbt/event_time/sample_window.py +60 -0
- dbt/events/__init__.py +15 -0
- dbt/events/base_types.py +36 -0
- dbt/events/core_types_pb2.py +2 -0
- dbt/events/logging.py +108 -0
- dbt/events/types.py +2516 -0
- dbt/exceptions.py +1486 -0
- dbt/flags.py +89 -0
- dbt/graph/__init__.py +11 -0
- dbt/graph/cli.py +247 -0
- dbt/graph/graph.py +172 -0
- dbt/graph/queue.py +214 -0
- dbt/graph/selector.py +374 -0
- dbt/graph/selector_methods.py +975 -0
- dbt/graph/selector_spec.py +222 -0
- dbt/graph/thread_pool.py +18 -0
- dbt/hooks.py +21 -0
- dbt/include/README.md +49 -0
- dbt/include/__init__.py +3 -0
- dbt/include/starter_project/.gitignore +4 -0
- dbt/include/starter_project/README.md +15 -0
- dbt/include/starter_project/__init__.py +3 -0
- dbt/include/starter_project/analyses/.gitkeep +0 -0
- dbt/include/starter_project/dbt_project.yml +36 -0
- dbt/include/starter_project/macros/.gitkeep +0 -0
- dbt/include/starter_project/models/example/my_first_dbt_model.sql +27 -0
- dbt/include/starter_project/models/example/my_second_dbt_model.sql +6 -0
- dbt/include/starter_project/models/example/schema.yml +21 -0
- dbt/include/starter_project/seeds/.gitkeep +0 -0
- dbt/include/starter_project/snapshots/.gitkeep +0 -0
- dbt/include/starter_project/tests/.gitkeep +0 -0
- dbt/internal_deprecations.py +26 -0
- dbt/jsonschemas/__init__.py +3 -0
- dbt/jsonschemas/jsonschemas.py +309 -0
- dbt/jsonschemas/project/0.0.110.json +4717 -0
- dbt/jsonschemas/project/0.0.85.json +2015 -0
- dbt/jsonschemas/resources/0.0.110.json +2636 -0
- dbt/jsonschemas/resources/0.0.85.json +2536 -0
- dbt/jsonschemas/resources/latest.json +6773 -0
- dbt/links.py +4 -0
- dbt/materializations/__init__.py +0 -0
- dbt/materializations/incremental/__init__.py +0 -0
- dbt/materializations/incremental/microbatch.py +236 -0
- dbt/mp_context.py +8 -0
- dbt/node_types.py +37 -0
- dbt/parser/__init__.py +23 -0
- dbt/parser/analysis.py +21 -0
- dbt/parser/base.py +548 -0
- dbt/parser/common.py +266 -0
- dbt/parser/docs.py +52 -0
- dbt/parser/fixtures.py +51 -0
- dbt/parser/functions.py +30 -0
- dbt/parser/generic_test.py +100 -0
- dbt/parser/generic_test_builders.py +333 -0
- dbt/parser/hooks.py +118 -0
- dbt/parser/macros.py +137 -0
- dbt/parser/manifest.py +2204 -0
- dbt/parser/models.py +573 -0
- dbt/parser/partial.py +1178 -0
- dbt/parser/read_files.py +445 -0
- dbt/parser/schema_generic_tests.py +422 -0
- dbt/parser/schema_renderer.py +111 -0
- dbt/parser/schema_yaml_readers.py +935 -0
- dbt/parser/schemas.py +1466 -0
- dbt/parser/search.py +149 -0
- dbt/parser/seeds.py +28 -0
- dbt/parser/singular_test.py +20 -0
- dbt/parser/snapshots.py +44 -0
- dbt/parser/sources.py +558 -0
- dbt/parser/sql.py +62 -0
- dbt/parser/unit_tests.py +621 -0
- dbt/plugins/__init__.py +20 -0
- dbt/plugins/contracts.py +9 -0
- dbt/plugins/exceptions.py +2 -0
- dbt/plugins/manager.py +163 -0
- dbt/plugins/manifest.py +21 -0
- dbt/profiler.py +20 -0
- dbt/py.typed +1 -0
- dbt/query_analyzer.cpython-310-darwin.so +0 -0
- dbt/query_analyzer.py +410 -0
- dbt/runners/__init__.py +2 -0
- dbt/runners/exposure_runner.py +7 -0
- dbt/runners/no_op_runner.py +45 -0
- dbt/runners/saved_query_runner.py +7 -0
- dbt/selected_resources.py +8 -0
- dbt/task/__init__.py +0 -0
- dbt/task/base.py +503 -0
- dbt/task/build.py +197 -0
- dbt/task/clean.py +56 -0
- dbt/task/clone.py +161 -0
- dbt/task/compile.py +150 -0
- dbt/task/compute.py +454 -0
- dbt/task/debug.py +505 -0
- dbt/task/deps.py +280 -0
- dbt/task/docs/__init__.py +3 -0
- dbt/task/docs/generate.py +660 -0
- dbt/task/docs/index.html +250 -0
- dbt/task/docs/serve.py +29 -0
- dbt/task/freshness.py +322 -0
- dbt/task/function.py +121 -0
- dbt/task/group_lookup.py +46 -0
- dbt/task/init.py +553 -0
- dbt/task/java.py +316 -0
- dbt/task/list.py +236 -0
- dbt/task/printer.py +175 -0
- dbt/task/retry.py +175 -0
- dbt/task/run.py +1306 -0
- dbt/task/run_operation.py +141 -0
- dbt/task/runnable.py +758 -0
- dbt/task/seed.py +103 -0
- dbt/task/show.py +149 -0
- dbt/task/snapshot.py +56 -0
- dbt/task/spark.py +414 -0
- dbt/task/sql.py +110 -0
- dbt/task/target_sync.py +759 -0
- dbt/task/test.py +464 -0
- dbt/tests/fixtures/__init__.py +1 -0
- dbt/tests/fixtures/project.py +620 -0
- dbt/tests/util.py +651 -0
- dbt/tracking.py +529 -0
- dbt/utils/__init__.py +3 -0
- dbt/utils/artifact_upload.py +151 -0
- dbt/utils/utils.py +408 -0
- dbt/version.py +268 -0
- dvt_cli/__init__.py +72 -0
- dvt_core-0.52.2.dist-info/METADATA +286 -0
- dvt_core-0.52.2.dist-info/RECORD +275 -0
- dvt_core-0.52.2.dist-info/WHEEL +5 -0
- dvt_core-0.52.2.dist-info/entry_points.txt +2 -0
- dvt_core-0.52.2.dist-info/top_level.txt +2 -0
|
@@ -0,0 +1,158 @@
|
|
|
1
|
+
from dataclasses import dataclass, field
|
|
2
|
+
from datetime import datetime
|
|
3
|
+
from typing import Any, Dict, List, Optional, Sequence, Union
|
|
4
|
+
|
|
5
|
+
from dbt.artifacts.resources import FreshnessThreshold
|
|
6
|
+
from dbt.artifacts.schemas.base import (
|
|
7
|
+
ArtifactMixin,
|
|
8
|
+
BaseArtifactMetadata,
|
|
9
|
+
VersionedSchema,
|
|
10
|
+
schema_version,
|
|
11
|
+
)
|
|
12
|
+
from dbt.artifacts.schemas.results import (
|
|
13
|
+
ExecutionResult,
|
|
14
|
+
FreshnessStatus,
|
|
15
|
+
NodeResult,
|
|
16
|
+
TimingInfo,
|
|
17
|
+
)
|
|
18
|
+
from dbt.contracts.graph.nodes import SourceDefinition
|
|
19
|
+
from dbt_common.dataclass_schema import StrEnum, dbtClassMixin
|
|
20
|
+
from dbt_common.exceptions import DbtInternalError
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
@dataclass
class SourceFreshnessResult(NodeResult):
    """Freshness-check outcome for a source that was actually evaluated."""

    node: SourceDefinition  # the source the check ran against
    status: FreshnessStatus
    max_loaded_at: datetime  # latest load timestamp observed in the source
    snapshotted_at: datetime  # when the check took its snapshot
    age: float  # data age in seconds (becomes max_loaded_at_time_ago_in_s in the artifact)

    @property
    def skipped(self) -> bool:
        # A freshness result is never considered skipped once it exists.
        return False
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
@dataclass
class PartialSourceFreshnessResult(NodeResult):
    """Freshness result that carries only a status (e.g. a runtime failure),
    without the timing/age details of a full SourceFreshnessResult."""

    status: FreshnessStatus

    @property
    def skipped(self) -> bool:
        # Partial results are still reported, never treated as skipped.
        return False
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
FreshnessNodeResult = Union[PartialSourceFreshnessResult, SourceFreshnessResult]
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
@dataclass
class FreshnessMetadata(BaseArtifactMetadata):
    """Artifact metadata for the sources (freshness) artifact; pins its schema version."""

    # Forward reference to the artifact class defined later in this module;
    # the lambda defers evaluation until instantiation.
    dbt_schema_version: str = field(
        default_factory=lambda: str(FreshnessExecutionResultArtifact.dbt_schema_version)
    )
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
@dataclass
class FreshnessResult(ExecutionResult):
    """In-memory result of a full freshness run: metadata plus per-node results."""

    metadata: FreshnessMetadata
    results: Sequence[FreshnessNodeResult]

    @classmethod
    def from_node_results(
        cls,
        results: List[FreshnessNodeResult],
        elapsed_time: float,
        generated_at: datetime,
    ):
        """Assemble a result object from per-node outcomes and run timing."""
        return cls(
            metadata=FreshnessMetadata(generated_at=generated_at),
            results=results,
            elapsed_time=elapsed_time,
        )

    def write(self, path):
        """Convert to the artifact representation and write it to ``path``."""
        artifact = FreshnessExecutionResultArtifact.from_result(self)
        artifact.write(path)
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
@dataclass
class SourceFreshnessOutput(dbtClassMixin):
    """Artifact record for a successfully evaluated source freshness check."""

    unique_id: str
    max_loaded_at: datetime
    snapshotted_at: datetime
    max_loaded_at_time_ago_in_s: float  # age of the data in seconds
    status: FreshnessStatus
    criteria: FreshnessThreshold  # the thresholds the status was evaluated against
    adapter_response: Dict[str, Any]
    timing: List[TimingInfo]
    thread_id: str
    execution_time: float
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
class FreshnessErrorEnum(StrEnum):
    """Status values for freshness checks that failed at runtime."""

    runtime_error = "runtime error"
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
@dataclass
class SourceFreshnessRuntimeError(dbtClassMixin):
    """Artifact record for a freshness check that raised a runtime error."""

    unique_id: str
    error: Optional[Union[str, int]]  # message captured from the failed run
    status: FreshnessErrorEnum
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
FreshnessNodeOutput = Union[SourceFreshnessRuntimeError, SourceFreshnessOutput]
|
|
100
|
+
|
|
101
|
+
|
|
102
|
+
@dataclass
@schema_version("sources", 3)
class FreshnessExecutionResultArtifact(
    ArtifactMixin,
    VersionedSchema,
):
    """Writable sources.json artifact for a freshness run."""

    metadata: FreshnessMetadata
    results: Sequence[FreshnessNodeOutput]
    elapsed_time: float

    @classmethod
    def from_result(cls, base: FreshnessResult):
        """Convert an in-memory FreshnessResult into its artifact form.

        Every node result is converted, including runtime errors:
        ``process_freshness_result`` maps RuntimeErr statuses to
        SourceFreshnessRuntimeError records.  (Previously this filtered to
        ``isinstance(r, SourceFreshnessResult)``, which silently dropped
        runtime-error results from the artifact.)
        """
        processed = [process_freshness_result(r) for r in base.results]
        return cls(
            metadata=base.metadata,
            results=processed,
            elapsed_time=base.elapsed_time,
        )
|
|
124
|
+
|
|
125
|
+
|
|
126
|
+
def process_freshness_result(result: FreshnessNodeResult) -> FreshnessNodeOutput:
    """Translate a node-level freshness result into its artifact output form.

    Runtime errors become SourceFreshnessRuntimeError records; everything else
    must be a full SourceFreshnessResult with freshness criteria attached.
    """
    node_id = result.node.unique_id

    # Runtime failures get a dedicated error record.
    if result.status == FreshnessStatus.RuntimeErr:
        return SourceFreshnessRuntimeError(
            unique_id=node_id,
            error=result.message,
            status=FreshnessErrorEnum.runtime_error,
        )

    # Any non-error result must carry the full freshness payload.
    if not isinstance(result, SourceFreshnessResult):
        raise DbtInternalError(
            f"Got {type(result)} instead of a SourceFreshnessResult for a "
            "non-error result in freshness execution!"
        )

    # A freshness result can only exist for a source with freshness criteria.
    threshold = result.node.freshness
    if threshold is None:
        raise DbtInternalError(
            "Somehow evaluated a freshness result for a source that has no freshness criteria!"
        )

    return SourceFreshnessOutput(
        unique_id=node_id,
        max_loaded_at=result.max_loaded_at,
        snapshotted_at=result.snapshotted_at,
        max_loaded_at_time_ago_in_s=result.age,
        status=result.status,
        criteria=threshold,
        adapter_response=result.adapter_response,
        timing=result.timing,
        thread_id=result.thread_id,
        execution_time=result.execution_time,
    )
|
|
File without changes
|
|
@@ -0,0 +1,211 @@
|
|
|
1
|
+
from dataclasses import dataclass, field
|
|
2
|
+
from datetime import datetime
|
|
3
|
+
from typing import Any, Dict, Iterable, List, Mapping, Optional, Tuple, Union
|
|
4
|
+
from uuid import UUID
|
|
5
|
+
|
|
6
|
+
from dbt import tracking
|
|
7
|
+
from dbt.artifacts.resources import (
|
|
8
|
+
Analysis,
|
|
9
|
+
Documentation,
|
|
10
|
+
Exposure,
|
|
11
|
+
Function,
|
|
12
|
+
GenericTest,
|
|
13
|
+
Group,
|
|
14
|
+
HookNode,
|
|
15
|
+
Macro,
|
|
16
|
+
Metric,
|
|
17
|
+
Model,
|
|
18
|
+
SavedQuery,
|
|
19
|
+
Seed,
|
|
20
|
+
SemanticModel,
|
|
21
|
+
SingularTest,
|
|
22
|
+
Snapshot,
|
|
23
|
+
SourceDefinition,
|
|
24
|
+
SqlOperation,
|
|
25
|
+
UnitTestDefinition,
|
|
26
|
+
)
|
|
27
|
+
from dbt.artifacts.resources.v1.components import Quoting
|
|
28
|
+
from dbt.artifacts.schemas.base import (
|
|
29
|
+
ArtifactMixin,
|
|
30
|
+
BaseArtifactMetadata,
|
|
31
|
+
get_artifact_dbt_version,
|
|
32
|
+
get_artifact_schema_version,
|
|
33
|
+
schema_version,
|
|
34
|
+
)
|
|
35
|
+
from dbt.artifacts.schemas.upgrades import (
|
|
36
|
+
upgrade_manifest_json,
|
|
37
|
+
upgrade_manifest_json_dbt_version,
|
|
38
|
+
)
|
|
39
|
+
from dbt.version import __version__
|
|
40
|
+
from dbt_common.exceptions import DbtInternalError
|
|
41
|
+
|
|
42
|
+
NodeEdgeMap = Dict[str, List[str]]
|
|
43
|
+
UniqueID = str
|
|
44
|
+
ManifestResource = Union[
|
|
45
|
+
Seed, Analysis, SingularTest, HookNode, Model, SqlOperation, GenericTest, Snapshot, Function
|
|
46
|
+
]
|
|
47
|
+
DisabledManifestResource = Union[
|
|
48
|
+
ManifestResource,
|
|
49
|
+
SourceDefinition,
|
|
50
|
+
Exposure,
|
|
51
|
+
Metric,
|
|
52
|
+
SavedQuery,
|
|
53
|
+
SemanticModel,
|
|
54
|
+
UnitTestDefinition,
|
|
55
|
+
]
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
@dataclass
class ManifestMetadata(BaseArtifactMetadata):
    """Metadata for the manifest."""

    dbt_schema_version: str = field(
        default_factory=lambda: str(WritableManifest.dbt_schema_version)
    )
    project_name: Optional[str] = field(
        default=None,
        metadata={
            "description": "Name of the root project",
        },
    )
    project_id: Optional[str] = field(
        default=None,
        metadata={
            "description": "A unique identifier for the project, hashed from the project name",
        },
    )
    user_id: Optional[UUID] = field(
        default=None,
        metadata={
            "description": "A unique identifier for the user",
        },
    )
    send_anonymous_usage_stats: Optional[bool] = field(
        default=None,
        metadata=dict(
            description=("Whether dbt is configured to send anonymous usage statistics")
        ),
    )
    adapter_type: Optional[str] = field(
        default=None,
        metadata=dict(description="The type name of the adapter"),
    )
    quoting: Optional[Quoting] = field(
        default_factory=Quoting,
        metadata=dict(description="The quoting configuration for the project"),
    )
    # NOTE(review): this default is evaluated once, when the module is imported,
    # so every instance shares the run_started_at captured at import time —
    # confirm that is intended (a default_factory would re-evaluate per instance).
    run_started_at: Optional[datetime] = field(
        default=tracking.active_user.run_started_at if tracking.active_user is not None else None,
        metadata=dict(description="The timestamp when the run started"),
    )

    @classmethod
    def default(cls):
        # Minimal metadata carrying only the schema version string.
        return cls(
            dbt_schema_version=str(WritableManifest.dbt_schema_version),
        )
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
@dataclass
@schema_version("manifest", 12)
class WritableManifest(ArtifactMixin):
    """The manifest.json artifact: every resource dbt knows about plus the
    dependency edges between them."""

    nodes: Mapping[UniqueID, ManifestResource] = field(
        metadata=dict(description=("The nodes defined in the dbt project and its dependencies"))
    )
    sources: Mapping[UniqueID, SourceDefinition] = field(
        metadata=dict(description=("The sources defined in the dbt project and its dependencies"))
    )
    macros: Mapping[UniqueID, Macro] = field(
        metadata=dict(description=("The macros defined in the dbt project and its dependencies"))
    )
    docs: Mapping[UniqueID, Documentation] = field(
        metadata=dict(description=("The docs defined in the dbt project and its dependencies"))
    )
    exposures: Mapping[UniqueID, Exposure] = field(
        metadata=dict(
            description=("The exposures defined in the dbt project and its dependencies")
        )
    )
    metrics: Mapping[UniqueID, Metric] = field(
        metadata=dict(description=("The metrics defined in the dbt project and its dependencies"))
    )
    groups: Mapping[UniqueID, Group] = field(
        metadata=dict(description=("The groups defined in the dbt project"))
    )
    selectors: Mapping[UniqueID, Any] = field(
        metadata=dict(description=("The selectors defined in selectors.yml"))
    )
    disabled: Optional[Mapping[UniqueID, List[DisabledManifestResource]]] = field(
        metadata=dict(description="A mapping of the disabled nodes in the target")
    )
    parent_map: Optional[NodeEdgeMap] = field(
        metadata=dict(
            description="A mapping from child nodes to their dependencies",
        )
    )
    child_map: Optional[NodeEdgeMap] = field(
        metadata=dict(
            description="A mapping from parent nodes to their dependents",
        )
    )
    group_map: Optional[NodeEdgeMap] = field(
        metadata=dict(
            description="A mapping from group names to their nodes",
        )
    )
    saved_queries: Mapping[UniqueID, SavedQuery] = field(
        metadata=dict(description=("The saved queries defined in the dbt project"))
    )
    semantic_models: Mapping[UniqueID, SemanticModel] = field(
        metadata=dict(description=("The semantic models defined in the dbt project"))
    )
    metadata: ManifestMetadata = field(
        metadata=dict(
            description="Metadata about the manifest",
        )
    )
    unit_tests: Mapping[UniqueID, UnitTestDefinition] = field(
        metadata=dict(
            description="The unit tests defined in the project",
        )
    )
    # The only field with a default; it must stay after all required fields.
    functions: Mapping[UniqueID, Function] = field(
        default_factory=dict,
        metadata=dict(description=("The functions defined in the dbt project")),
    )

    @classmethod
    def compatible_previous_versions(cls) -> Iterable[Tuple[str, int]]:
        # Manifest schema versions this artifact can still read and upgrade from.
        return [
            ("manifest", 4),
            ("manifest", 5),
            ("manifest", 6),
            ("manifest", 7),
            ("manifest", 8),
            ("manifest", 9),
            ("manifest", 10),
            ("manifest", 11),
        ]

    @classmethod
    def upgrade_schema_version(cls, data):
        """This overrides the "upgrade_schema_version" call in VersionedSchema (via
        ArtifactMixin) to modify the dictionary passed in from earlier versions of the manifest."""
        manifest_schema_version = get_artifact_schema_version(data)
        if manifest_schema_version < cls.dbt_schema_version.version:
            data = upgrade_manifest_json(data, manifest_schema_version)

        # Separately patch artifacts produced by a different dbt version.
        manifest_dbt_version = get_artifact_dbt_version(data)
        if manifest_dbt_version and manifest_dbt_version != __version__:
            data = upgrade_manifest_json_dbt_version(data)
        return cls.from_dict(data)

    @classmethod
    def validate(cls, _):
        # When dbt try to load an artifact with additional optional fields
        # that are not present in the schema, from_dict will work fine.
        # As long as validate is not called, the schema will not be enforced.
        # This is intentional, as it allows for safer schema upgrades.
        raise DbtInternalError(
            "The WritableManifest should never be validated directly to allow for schema upgrades."
        )
|
|
@@ -0,0 +1,147 @@
|
|
|
1
|
+
from dataclasses import dataclass
|
|
2
|
+
from datetime import datetime, timezone
|
|
3
|
+
from typing import Any, Callable, Dict, List, Optional, Sequence, Union
|
|
4
|
+
|
|
5
|
+
from dbt.contracts.graph.nodes import ResultNode
|
|
6
|
+
from dbt_common.dataclass_schema import StrEnum, dbtClassMixin
|
|
7
|
+
from dbt_common.events.helpers import datetime_to_json_string
|
|
8
|
+
from dbt_common.utils import cast_to_int, cast_to_str
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
@dataclass
class TimingInfo(dbtClassMixin):
    """
    Represents a step in the execution of a node.
    `name` should be one of: compile, execute, or other
    Do not call directly, use `collect_timing_info` instead.
    """

    name: str
    started_at: Optional[datetime] = None
    completed_at: Optional[datetime] = None

    @staticmethod
    def _now() -> datetime:
        # Naive UTC timestamp (tzinfo stripped), shared by begin() and end().
        return datetime.now(timezone.utc).replace(tzinfo=None)

    def begin(self) -> None:
        """Record the start of this step."""
        self.started_at = self._now()

    def end(self) -> None:
        """Record the end of this step."""
        self.completed_at = self._now()

    def to_msg_dict(self):
        """Serialize for event/log messages, omitting timestamps not yet set."""
        payload = {"name": str(self.name)}
        if self.started_at:
            payload["started_at"] = datetime_to_json_string(self.started_at)
        if self.completed_at:
            payload["completed_at"] = datetime_to_json_string(self.completed_at)
        return payload
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
# This is a context manager
|
|
39
|
+
class collect_timing_info:
    """Context manager that times a block and hands the TimingInfo to a callback.

    ``begin()`` is recorded on entry, ``end()`` on exit, and the populated
    TimingInfo is passed to ``callback`` on exit (even when the block raises,
    since __exit__ always runs).
    """

    def __init__(self, name: str, callback: Callable[[TimingInfo], None]) -> None:
        self.timing_info = TimingInfo(name=name)
        self.callback = callback

    def __enter__(self) -> TimingInfo:
        self.timing_info.begin()
        # Return the TimingInfo so `with collect_timing_info(...) as timing:`
        # binds something useful; previously __enter__ implicitly returned
        # None, making the `as` binding useless.
        return self.timing_info

    def __exit__(self, exc_type, exc_value, traceback) -> None:
        self.timing_info.end()
        self.callback(self.timing_info)
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
class RunningStatus(StrEnum):
    """In-flight lifecycle states of a node while it is executing."""

    Started = "started"
    Compiling = "compiling"
    Executing = "executing"
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
class NodeStatus(StrEnum):
    """Superset of terminal node statuses; the per-command enums below
    (RunStatus, TestStatus, FreshnessStatus) each reuse a subset."""

    Success = "success"
    Error = "error"
    Fail = "fail"
    Warn = "warn"
    Skipped = "skipped"
    PartialSuccess = "partial success"
    Pass = "pass"
    RuntimeErr = "runtime error"
    NoOp = "no-op"
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
class RunStatus(StrEnum):
    """Terminal statuses for run-style node execution."""

    Success = NodeStatus.Success
    Error = NodeStatus.Error
    Skipped = NodeStatus.Skipped
    PartialSuccess = NodeStatus.PartialSuccess
    NoOp = NodeStatus.NoOp
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
class TestStatus(StrEnum):
    """Terminal statuses for test execution."""

    # Prevents pytest from collecting this class as a test case.
    __test__ = False
    Pass = NodeStatus.Pass
    Error = NodeStatus.Error
    Fail = NodeStatus.Fail
    Warn = NodeStatus.Warn
    Skipped = NodeStatus.Skipped
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
class FreshnessStatus(StrEnum):
    """Terminal statuses for source freshness checks."""

    Pass = NodeStatus.Pass
    Warn = NodeStatus.Warn
    Error = NodeStatus.Error
    RuntimeErr = NodeStatus.RuntimeErr
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
@dataclass
class BaseResult(dbtClassMixin):
    """Fields and serialization behavior common to every result type."""

    status: Union[RunStatus, TestStatus, FreshnessStatus]
    timing: List[TimingInfo]
    thread_id: str
    execution_time: float
    adapter_response: Dict[str, Any]
    message: Optional[str]
    failures: Optional[int]

    @classmethod
    def __pre_deserialize__(cls, data):
        # Older artifacts may omit these optional fields; default them to None.
        data = super().__pre_deserialize__(data)
        data.setdefault("message", None)
        data.setdefault("failures", None)
        return data

    def to_msg_dict(self):
        """Shape this result for structured event/log messages."""
        return {
            "status": str(self.status),
            "message": cast_to_str(self.message),
            "thread": self.thread_id,
            "execution_time": self.execution_time,
            "num_failures": cast_to_int(self.failures),
            "timing_info": [step.to_msg_dict() for step in self.timing],
            "adapter_response": self.adapter_response,
        }
|
|
124
|
+
|
|
125
|
+
|
|
126
|
+
@dataclass
class NodeResult(BaseResult):
    """A result tied to the manifest node that produced it."""

    node: ResultNode  # the node that was executed
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
@dataclass
class ExecutionResult(dbtClassMixin):
    """A collection of results plus total elapsed wall-clock time.

    Implements the sequence protocol so callers can len()/iterate/index the
    results directly on this object.
    """

    results: Sequence[BaseResult]
    elapsed_time: float

    def __len__(self) -> int:
        return len(self.results)

    def __iter__(self):
        return iter(self.results)

    def __getitem__(self, idx):
        return self.results[idx]
|
|
144
|
+
|
|
145
|
+
|
|
146
|
+
# due to issues with typing.Union collapsing subclasses, this can't subclass
|
|
147
|
+
# PartialResult
|
|
File without changes
|
|
@@ -0,0 +1,184 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import copy
|
|
4
|
+
import threading
|
|
5
|
+
from dataclasses import dataclass, field
|
|
6
|
+
from datetime import datetime, timezone
|
|
7
|
+
from typing import Any, Dict, Iterable, Optional, Sequence, Tuple
|
|
8
|
+
|
|
9
|
+
# https://github.com/dbt-labs/dbt-core/issues/10098
|
|
10
|
+
# Needed for Mashumaro serialization of RunResult below
|
|
11
|
+
# TODO: investigate alternative approaches to restore conditional import
|
|
12
|
+
# if TYPE_CHECKING:
|
|
13
|
+
import agate
|
|
14
|
+
|
|
15
|
+
from dbt.artifacts.resources import CompiledResource
|
|
16
|
+
from dbt.artifacts.schemas.base import (
|
|
17
|
+
ArtifactMixin,
|
|
18
|
+
BaseArtifactMetadata,
|
|
19
|
+
get_artifact_schema_version,
|
|
20
|
+
schema_version,
|
|
21
|
+
)
|
|
22
|
+
from dbt.artifacts.schemas.batch_results import BatchResults
|
|
23
|
+
from dbt.artifacts.schemas.results import (
|
|
24
|
+
BaseResult,
|
|
25
|
+
ExecutionResult,
|
|
26
|
+
NodeResult,
|
|
27
|
+
ResultNode,
|
|
28
|
+
RunStatus,
|
|
29
|
+
)
|
|
30
|
+
from dbt.exceptions import scrub_secrets
|
|
31
|
+
from dbt_common.clients.system import write_json
|
|
32
|
+
from dbt_common.constants import SECRET_ENV_PREFIX
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
@dataclass
class RunResult(NodeResult):
    """Execution result for a single node in a run."""

    # Excluded from (de)serialization: agate tables are runtime-only objects.
    agate_table: Optional["agate.Table"] = field(
        default=None, metadata={"serialize": lambda x: None, "deserialize": lambda x: None}
    )
    batch_results: Optional[BatchResults] = None

    @property
    def skipped(self):
        return self.status == RunStatus.Skipped

    @classmethod
    def from_node(cls, node: ResultNode, status: RunStatus, message: Optional[str]):
        """Build a zero-timing result for ``node`` on the current thread.

        Uses ``cls`` rather than hard-coding ``RunResult`` so that subclasses
        calling this alternate constructor get instances of their own type
        (the previous hard-coded class defeated the point of a classmethod).
        """
        return cls(
            status=status,
            thread_id=threading.current_thread().name,
            execution_time=0,
            timing=[],
            message=message,
            node=node,
            adapter_response={},
            failures=None,
            batch_results=None,
        )
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
@dataclass
class RunResultsMetadata(BaseArtifactMetadata):
    """Metadata header embedded in the run_results artifact."""

    # Defaults to the schema version declared on RunResultsArtifact so the
    # metadata always matches the artifact that contains it.
    dbt_schema_version: str = field(
        default_factory=lambda: str(RunResultsArtifact.dbt_schema_version)
    )
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
@dataclass
class RunResultOutput(BaseResult):
    """Serializable form of a single node result, as written to run_results.json."""

    unique_id: str
    # Compilation details; populated only when the node is a CompiledResource
    # (see process_run_result), otherwise None.
    compiled: Optional[bool]
    compiled_code: Optional[str]
    relation_name: Optional[str]
    batch_results: Optional[BatchResults] = None
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
def process_run_result(result: RunResult) -> RunResultOutput:
    """Project an in-memory RunResult onto its serializable RunResultOutput form."""
    node = result.node
    # Compilation fields exist only on compiled resources; everything else
    # serializes them as None.
    has_compiled_fields = isinstance(node, CompiledResource)

    return RunResultOutput(
        unique_id=node.unique_id,
        status=result.status,
        timing=result.timing,
        thread_id=result.thread_id,
        execution_time=result.execution_time,
        message=result.message,
        adapter_response=result.adapter_response,
        failures=result.failures,
        batch_results=result.batch_results,
        compiled=node.compiled if has_compiled_fields else None,  # type:ignore
        compiled_code=node.compiled_code if has_compiled_fields else None,  # type:ignore
        relation_name=node.relation_name if has_compiled_fields else None,  # type:ignore
    )
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
@dataclass
class RunExecutionResult(ExecutionResult):
    """In-memory result of a whole invocation; convertible to the artifact form."""

    results: Sequence[RunResult]
    args: Dict[str, Any] = field(default_factory=dict)
    generated_at: datetime = field(
        default_factory=lambda: datetime.now(timezone.utc).replace(tzinfo=None)
    )

    def write(self, path: str):
        """Convert to the run_results artifact schema and write it to *path*."""
        artifact = RunResultsArtifact.from_execution_results(
            results=self.results,
            elapsed_time=self.elapsed_time,
            generated_at=self.generated_at,
            args=self.args,
        )
        artifact.write(path)
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
@dataclass
@schema_version("run-results", 6)
class RunResultsArtifact(ExecutionResult, ArtifactMixin):
    """The run_results.json artifact: serialized node results plus invocation args."""

    results: Sequence[RunResultOutput]
    args: Dict[str, Any] = field(default_factory=dict)

    @classmethod
    def from_execution_results(
        cls,
        results: Sequence[RunResult],
        elapsed_time: float,
        generated_at: datetime,
        args: Dict,
    ):
        """Build the artifact from in-memory results, scrubbing secret vars.

        Only entries that are actual RunResult instances are serialized.
        Values of vars named with SECRET_ENV_PREFIX are scrubbed from the
        invocation command and the vars dict; *args* itself is left
        unmodified (a deep copy is scrubbed instead).
        """
        processed_results = [
            process_run_result(result) for result in results if isinstance(result, RunResult)
        ]
        meta = RunResultsMetadata(
            dbt_schema_version=str(cls.dbt_schema_version),
            generated_at=generated_at,
        )

        # Vars parsed from YAML/CLI may be non-strings (ints, bools, ...);
        # guard with isinstance so .strip() is never called on those.
        secret_vars = [
            v
            for k, v in args["vars"].items()
            if k.startswith(SECRET_ENV_PREFIX) and isinstance(v, str) and v.strip()
        ]

        scrubbed_args = copy.deepcopy(args)

        # scrub secrets in invocation command
        scrubbed_args["invocation_command"] = scrub_secrets(
            scrubbed_args["invocation_command"], secret_vars
        )

        # scrub secrets in vars dict; non-string values cannot contain secret
        # text and pass through unchanged
        scrubbed_args["vars"] = {
            k: scrub_secrets(v, secret_vars) if isinstance(v, str) else v
            for k, v in scrubbed_args["vars"].items()
        }

        return cls(
            metadata=meta, results=processed_results, elapsed_time=elapsed_time, args=scrubbed_args
        )

    @classmethod
    def compatible_previous_versions(cls) -> Iterable[Tuple[str, int]]:
        """Older schema versions this artifact can still be loaded from."""
        return [
            ("run-results", 4),
            ("run-results", 5),
        ]

    @classmethod
    def upgrade_schema_version(cls, data):
        """This overrides the "upgrade_schema_version" call in VersionedSchema (via
        ArtifactMixin) to modify the dictionary passed in from earlier versions of the run_results.
        """
        run_results_schema_version = get_artifact_schema_version(data)
        # If less than the current version (v6), preprocess contents to match latest schema version
        if run_results_schema_version <= 5:
            # v5 added 'compiled' attributes to each result entry. Fill them in
            # only when absent (v4 data) so real values present in v5 data are
            # not clobbered with defaults.
            for result in data["results"]:
                result.setdefault("compiled", False)
                result.setdefault("compiled_code", "")
                result.setdefault("relation_name", "")
        return cls.from_dict(data)

    def write(self, path: str):
        """Write the artifact as JSON to *path*, keeping None-valued fields."""
        write_json(path, self.to_dict(omit_none=False))
|