dvt-core 0.52.2 (cp310-cp310-macosx_10_9_x86_64.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of dvt-core might be problematic.
- dbt/__init__.py +7 -0
- dbt/_pydantic_shim.py +26 -0
- dbt/artifacts/__init__.py +0 -0
- dbt/artifacts/exceptions/__init__.py +1 -0
- dbt/artifacts/exceptions/schemas.py +31 -0
- dbt/artifacts/resources/__init__.py +116 -0
- dbt/artifacts/resources/base.py +67 -0
- dbt/artifacts/resources/types.py +93 -0
- dbt/artifacts/resources/v1/analysis.py +10 -0
- dbt/artifacts/resources/v1/catalog.py +23 -0
- dbt/artifacts/resources/v1/components.py +274 -0
- dbt/artifacts/resources/v1/config.py +277 -0
- dbt/artifacts/resources/v1/documentation.py +11 -0
- dbt/artifacts/resources/v1/exposure.py +51 -0
- dbt/artifacts/resources/v1/function.py +52 -0
- dbt/artifacts/resources/v1/generic_test.py +31 -0
- dbt/artifacts/resources/v1/group.py +21 -0
- dbt/artifacts/resources/v1/hook.py +11 -0
- dbt/artifacts/resources/v1/macro.py +29 -0
- dbt/artifacts/resources/v1/metric.py +172 -0
- dbt/artifacts/resources/v1/model.py +145 -0
- dbt/artifacts/resources/v1/owner.py +10 -0
- dbt/artifacts/resources/v1/saved_query.py +111 -0
- dbt/artifacts/resources/v1/seed.py +41 -0
- dbt/artifacts/resources/v1/semantic_layer_components.py +72 -0
- dbt/artifacts/resources/v1/semantic_model.py +314 -0
- dbt/artifacts/resources/v1/singular_test.py +14 -0
- dbt/artifacts/resources/v1/snapshot.py +91 -0
- dbt/artifacts/resources/v1/source_definition.py +84 -0
- dbt/artifacts/resources/v1/sql_operation.py +10 -0
- dbt/artifacts/resources/v1/unit_test_definition.py +77 -0
- dbt/artifacts/schemas/__init__.py +0 -0
- dbt/artifacts/schemas/base.py +191 -0
- dbt/artifacts/schemas/batch_results.py +24 -0
- dbt/artifacts/schemas/catalog/__init__.py +11 -0
- dbt/artifacts/schemas/catalog/v1/__init__.py +0 -0
- dbt/artifacts/schemas/catalog/v1/catalog.py +59 -0
- dbt/artifacts/schemas/freshness/__init__.py +1 -0
- dbt/artifacts/schemas/freshness/v3/__init__.py +0 -0
- dbt/artifacts/schemas/freshness/v3/freshness.py +158 -0
- dbt/artifacts/schemas/manifest/__init__.py +2 -0
- dbt/artifacts/schemas/manifest/v12/__init__.py +0 -0
- dbt/artifacts/schemas/manifest/v12/manifest.py +211 -0
- dbt/artifacts/schemas/results.py +147 -0
- dbt/artifacts/schemas/run/__init__.py +2 -0
- dbt/artifacts/schemas/run/v5/__init__.py +0 -0
- dbt/artifacts/schemas/run/v5/run.py +184 -0
- dbt/artifacts/schemas/upgrades/__init__.py +4 -0
- dbt/artifacts/schemas/upgrades/upgrade_manifest.py +174 -0
- dbt/artifacts/schemas/upgrades/upgrade_manifest_dbt_version.py +2 -0
- dbt/artifacts/utils/validation.py +153 -0
- dbt/cli/__init__.py +1 -0
- dbt/cli/context.py +17 -0
- dbt/cli/exceptions.py +57 -0
- dbt/cli/flags.py +560 -0
- dbt/cli/main.py +2039 -0
- dbt/cli/option_types.py +121 -0
- dbt/cli/options.py +80 -0
- dbt/cli/params.py +804 -0
- dbt/cli/requires.py +490 -0
- dbt/cli/resolvers.py +50 -0
- dbt/cli/types.py +40 -0
- dbt/clients/__init__.py +0 -0
- dbt/clients/checked_load.py +83 -0
- dbt/clients/git.py +164 -0
- dbt/clients/jinja.py +206 -0
- dbt/clients/jinja_static.py +245 -0
- dbt/clients/registry.py +192 -0
- dbt/clients/yaml_helper.py +68 -0
- dbt/compilation.py +876 -0
- dbt/compute/__init__.py +14 -0
- dbt/compute/engines/__init__.py +12 -0
- dbt/compute/engines/spark_engine.py +624 -0
- dbt/compute/federated_executor.py +837 -0
- dbt/compute/filter_pushdown.cpython-310-darwin.so +0 -0
- dbt/compute/filter_pushdown.py +273 -0
- dbt/compute/jar_provisioning.cpython-310-darwin.so +0 -0
- dbt/compute/jar_provisioning.py +255 -0
- dbt/compute/java_compat.cpython-310-darwin.so +0 -0
- dbt/compute/java_compat.py +689 -0
- dbt/compute/jdbc_utils.cpython-310-darwin.so +0 -0
- dbt/compute/jdbc_utils.py +678 -0
- dbt/compute/smart_selector.cpython-310-darwin.so +0 -0
- dbt/compute/smart_selector.py +311 -0
- dbt/compute/strategies/__init__.py +54 -0
- dbt/compute/strategies/base.py +165 -0
- dbt/compute/strategies/dataproc.py +207 -0
- dbt/compute/strategies/emr.py +203 -0
- dbt/compute/strategies/local.py +364 -0
- dbt/compute/strategies/standalone.py +262 -0
- dbt/config/__init__.py +4 -0
- dbt/config/catalogs.py +94 -0
- dbt/config/compute.cpython-310-darwin.so +0 -0
- dbt/config/compute.py +547 -0
- dbt/config/dvt_profile.cpython-310-darwin.so +0 -0
- dbt/config/dvt_profile.py +342 -0
- dbt/config/profile.py +422 -0
- dbt/config/project.py +873 -0
- dbt/config/project_utils.py +28 -0
- dbt/config/renderer.py +231 -0
- dbt/config/runtime.py +553 -0
- dbt/config/selectors.py +208 -0
- dbt/config/utils.py +77 -0
- dbt/constants.py +28 -0
- dbt/context/__init__.py +0 -0
- dbt/context/base.py +745 -0
- dbt/context/configured.py +135 -0
- dbt/context/context_config.py +382 -0
- dbt/context/docs.py +82 -0
- dbt/context/exceptions_jinja.py +178 -0
- dbt/context/macro_resolver.py +195 -0
- dbt/context/macros.py +171 -0
- dbt/context/manifest.py +72 -0
- dbt/context/providers.py +2249 -0
- dbt/context/query_header.py +13 -0
- dbt/context/secret.py +58 -0
- dbt/context/target.py +74 -0
- dbt/contracts/__init__.py +0 -0
- dbt/contracts/files.py +413 -0
- dbt/contracts/graph/__init__.py +0 -0
- dbt/contracts/graph/manifest.py +1904 -0
- dbt/contracts/graph/metrics.py +97 -0
- dbt/contracts/graph/model_config.py +70 -0
- dbt/contracts/graph/node_args.py +42 -0
- dbt/contracts/graph/nodes.py +1806 -0
- dbt/contracts/graph/semantic_manifest.py +232 -0
- dbt/contracts/graph/unparsed.py +811 -0
- dbt/contracts/project.py +417 -0
- dbt/contracts/results.py +53 -0
- dbt/contracts/selection.py +23 -0
- dbt/contracts/sql.py +85 -0
- dbt/contracts/state.py +68 -0
- dbt/contracts/util.py +46 -0
- dbt/deprecations.py +346 -0
- dbt/deps/__init__.py +0 -0
- dbt/deps/base.py +152 -0
- dbt/deps/git.py +195 -0
- dbt/deps/local.py +79 -0
- dbt/deps/registry.py +130 -0
- dbt/deps/resolver.py +149 -0
- dbt/deps/tarball.py +120 -0
- dbt/docs/source/_ext/dbt_click.py +119 -0
- dbt/docs/source/conf.py +32 -0
- dbt/env_vars.py +64 -0
- dbt/event_time/event_time.py +40 -0
- dbt/event_time/sample_window.py +60 -0
- dbt/events/__init__.py +15 -0
- dbt/events/base_types.py +36 -0
- dbt/events/core_types_pb2.py +2 -0
- dbt/events/logging.py +108 -0
- dbt/events/types.py +2516 -0
- dbt/exceptions.py +1486 -0
- dbt/flags.py +89 -0
- dbt/graph/__init__.py +11 -0
- dbt/graph/cli.py +247 -0
- dbt/graph/graph.py +172 -0
- dbt/graph/queue.py +214 -0
- dbt/graph/selector.py +374 -0
- dbt/graph/selector_methods.py +975 -0
- dbt/graph/selector_spec.py +222 -0
- dbt/graph/thread_pool.py +18 -0
- dbt/hooks.py +21 -0
- dbt/include/README.md +49 -0
- dbt/include/__init__.py +3 -0
- dbt/include/starter_project/.gitignore +4 -0
- dbt/include/starter_project/README.md +15 -0
- dbt/include/starter_project/__init__.py +3 -0
- dbt/include/starter_project/analyses/.gitkeep +0 -0
- dbt/include/starter_project/dbt_project.yml +36 -0
- dbt/include/starter_project/macros/.gitkeep +0 -0
- dbt/include/starter_project/models/example/my_first_dbt_model.sql +27 -0
- dbt/include/starter_project/models/example/my_second_dbt_model.sql +6 -0
- dbt/include/starter_project/models/example/schema.yml +21 -0
- dbt/include/starter_project/seeds/.gitkeep +0 -0
- dbt/include/starter_project/snapshots/.gitkeep +0 -0
- dbt/include/starter_project/tests/.gitkeep +0 -0
- dbt/internal_deprecations.py +26 -0
- dbt/jsonschemas/__init__.py +3 -0
- dbt/jsonschemas/jsonschemas.py +309 -0
- dbt/jsonschemas/project/0.0.110.json +4717 -0
- dbt/jsonschemas/project/0.0.85.json +2015 -0
- dbt/jsonschemas/resources/0.0.110.json +2636 -0
- dbt/jsonschemas/resources/0.0.85.json +2536 -0
- dbt/jsonschemas/resources/latest.json +6773 -0
- dbt/links.py +4 -0
- dbt/materializations/__init__.py +0 -0
- dbt/materializations/incremental/__init__.py +0 -0
- dbt/materializations/incremental/microbatch.py +236 -0
- dbt/mp_context.py +8 -0
- dbt/node_types.py +37 -0
- dbt/parser/__init__.py +23 -0
- dbt/parser/analysis.py +21 -0
- dbt/parser/base.py +548 -0
- dbt/parser/common.py +266 -0
- dbt/parser/docs.py +52 -0
- dbt/parser/fixtures.py +51 -0
- dbt/parser/functions.py +30 -0
- dbt/parser/generic_test.py +100 -0
- dbt/parser/generic_test_builders.py +333 -0
- dbt/parser/hooks.py +118 -0
- dbt/parser/macros.py +137 -0
- dbt/parser/manifest.py +2204 -0
- dbt/parser/models.py +573 -0
- dbt/parser/partial.py +1178 -0
- dbt/parser/read_files.py +445 -0
- dbt/parser/schema_generic_tests.py +422 -0
- dbt/parser/schema_renderer.py +111 -0
- dbt/parser/schema_yaml_readers.py +935 -0
- dbt/parser/schemas.py +1466 -0
- dbt/parser/search.py +149 -0
- dbt/parser/seeds.py +28 -0
- dbt/parser/singular_test.py +20 -0
- dbt/parser/snapshots.py +44 -0
- dbt/parser/sources.py +558 -0
- dbt/parser/sql.py +62 -0
- dbt/parser/unit_tests.py +621 -0
- dbt/plugins/__init__.py +20 -0
- dbt/plugins/contracts.py +9 -0
- dbt/plugins/exceptions.py +2 -0
- dbt/plugins/manager.py +163 -0
- dbt/plugins/manifest.py +21 -0
- dbt/profiler.py +20 -0
- dbt/py.typed +1 -0
- dbt/query_analyzer.cpython-310-darwin.so +0 -0
- dbt/query_analyzer.py +410 -0
- dbt/runners/__init__.py +2 -0
- dbt/runners/exposure_runner.py +7 -0
- dbt/runners/no_op_runner.py +45 -0
- dbt/runners/saved_query_runner.py +7 -0
- dbt/selected_resources.py +8 -0
- dbt/task/__init__.py +0 -0
- dbt/task/base.py +503 -0
- dbt/task/build.py +197 -0
- dbt/task/clean.py +56 -0
- dbt/task/clone.py +161 -0
- dbt/task/compile.py +150 -0
- dbt/task/compute.py +454 -0
- dbt/task/debug.py +505 -0
- dbt/task/deps.py +280 -0
- dbt/task/docs/__init__.py +3 -0
- dbt/task/docs/generate.py +660 -0
- dbt/task/docs/index.html +250 -0
- dbt/task/docs/serve.py +29 -0
- dbt/task/freshness.py +322 -0
- dbt/task/function.py +121 -0
- dbt/task/group_lookup.py +46 -0
- dbt/task/init.py +553 -0
- dbt/task/java.py +316 -0
- dbt/task/list.py +236 -0
- dbt/task/printer.py +175 -0
- dbt/task/retry.py +175 -0
- dbt/task/run.py +1306 -0
- dbt/task/run_operation.py +141 -0
- dbt/task/runnable.py +758 -0
- dbt/task/seed.py +103 -0
- dbt/task/show.py +149 -0
- dbt/task/snapshot.py +56 -0
- dbt/task/spark.py +414 -0
- dbt/task/sql.py +110 -0
- dbt/task/target_sync.py +759 -0
- dbt/task/test.py +464 -0
- dbt/tests/fixtures/__init__.py +1 -0
- dbt/tests/fixtures/project.py +620 -0
- dbt/tests/util.py +651 -0
- dbt/tracking.py +529 -0
- dbt/utils/__init__.py +3 -0
- dbt/utils/artifact_upload.py +151 -0
- dbt/utils/utils.py +408 -0
- dbt/version.py +268 -0
- dvt_cli/__init__.py +72 -0
- dvt_core-0.52.2.dist-info/METADATA +286 -0
- dvt_core-0.52.2.dist-info/RECORD +275 -0
- dvt_core-0.52.2.dist-info/WHEEL +5 -0
- dvt_core-0.52.2.dist-info/entry_points.txt +2 -0
- dvt_core-0.52.2.dist-info/top_level.txt +2 -0
dbt/artifacts/resources/v1/singular_test.py
@@ -0,0 +1,14 @@
+from dataclasses import dataclass, field
+from typing import Literal
+
+from dbt.artifacts.resources.types import NodeType
+from dbt.artifacts.resources.v1.components import CompiledResource
+from dbt.artifacts.resources.v1.config import TestConfig
+
+
+@dataclass
+class SingularTest(CompiledResource):
+    resource_type: Literal[NodeType.Test]
+    # Was not able to make mypy happy and keep the code working. We need to
+    # refactor the various configs.
+    config: TestConfig = field(default_factory=TestConfig)  # type: ignore
dbt/artifacts/resources/v1/snapshot.py
@@ -0,0 +1,91 @@
+from dataclasses import dataclass, field
+from typing import Dict, List, Literal, Optional, Union
+
+from dbt.artifacts.resources.types import NodeType
+from dbt.artifacts.resources.v1.components import CompiledResource, DeferRelation
+from dbt.artifacts.resources.v1.config import NodeConfig
+from dbt_common.dataclass_schema import ValidationError, dbtClassMixin
+
+
+@dataclass
+class SnapshotMetaColumnNames(dbtClassMixin):
+    dbt_valid_to: Optional[str] = None
+    dbt_valid_from: Optional[str] = None
+    dbt_scd_id: Optional[str] = None
+    dbt_updated_at: Optional[str] = None
+    dbt_is_deleted: Optional[str] = None
+
+
+@dataclass
+class SnapshotConfig(NodeConfig):
+    materialized: str = "snapshot"
+    strategy: Optional[str] = None
+    unique_key: Union[str, List[str], None] = None
+    target_schema: Optional[str] = None
+    target_database: Optional[str] = None
+    updated_at: Optional[str] = None
+    # Not using Optional because of serialization issues with a Union of str and List[str]
+    check_cols: Union[str, List[str], None] = None
+    snapshot_meta_column_names: SnapshotMetaColumnNames = field(
+        default_factory=SnapshotMetaColumnNames
+    )
+    dbt_valid_to_current: Optional[str] = None
+
+    @property
+    def snapshot_table_column_names(self):
+        return {
+            "dbt_valid_from": self.snapshot_meta_column_names.dbt_valid_from or "dbt_valid_from",
+            "dbt_valid_to": self.snapshot_meta_column_names.dbt_valid_to or "dbt_valid_to",
+            "dbt_scd_id": self.snapshot_meta_column_names.dbt_scd_id or "dbt_scd_id",
+            "dbt_updated_at": self.snapshot_meta_column_names.dbt_updated_at or "dbt_updated_at",
+            "dbt_is_deleted": self.snapshot_meta_column_names.dbt_is_deleted or "dbt_is_deleted",
+        }
+
+    def final_validate(self):
+        if not self.strategy or not self.unique_key:
+            raise ValidationError(
+                "Snapshots must be configured with a 'strategy' and 'unique_key'."
+            )
+        if self.strategy == "check":
+            if not self.check_cols:
+                raise ValidationError(
+                    "A snapshot configured with the check strategy must "
+                    "specify a check_cols configuration."
+                )
+            if isinstance(self.check_cols, str) and self.check_cols != "all":
+                raise ValidationError(
+                    f"Invalid value for 'check_cols': {self.check_cols}. "
+                    "Expected 'all' or a list of strings."
+                )
+        elif self.strategy == "timestamp":
+            if not self.updated_at:
+                raise ValidationError(
+                    "A snapshot configured with the timestamp strategy "
+                    "must specify an updated_at configuration."
+                )
+            if self.check_cols:
+                raise ValidationError("A 'timestamp' snapshot should not have 'check_cols'")
+        # If the strategy is not 'check' or 'timestamp' it's a custom strategy,
+        # formerly supported with GenericSnapshotConfig
+
+        if self.materialized and self.materialized != "snapshot":
+            raise ValidationError("A snapshot must have a materialized value of 'snapshot'")
+
+    # Called by "calculate_node_config_dict" in ContextConfigGenerator
+    def finalize_and_validate(self):
+        data = self.to_dict(omit_none=True)
+        self.validate(data)
+        return self.from_dict(data)
+
+
+@dataclass
+class Snapshot(CompiledResource):
+    resource_type: Literal[NodeType.Snapshot]
+    config: SnapshotConfig
+    defer_relation: Optional[DeferRelation] = None
+
+    def __post_serialize__(self, dct, context: Optional[Dict] = None):
+        dct = super().__post_serialize__(dct, context)
+        if context and context.get("artifact") and "defer_relation" in dct:
+            del dct["defer_relation"]
+        return dct
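The SnapshotConfig above carries the snapshot-specific settings plus the meta-column renames. A minimal usage sketch follows; it is not part of the package, it assumes dvt-core 0.52.2 is importable and that SnapshotConfig accepts these keyword arguments directly (all other NodeConfig fields default), and the column values are illustrative.

from dbt.artifacts.resources.v1.snapshot import SnapshotConfig, SnapshotMetaColumnNames
from dbt_common.dataclass_schema import ValidationError

# A timestamp-strategy snapshot needs both unique_key and updated_at.
cfg = SnapshotConfig(strategy="timestamp", unique_key="id", updated_at="updated_at")
cfg.final_validate()  # passes

# A check-strategy snapshot without check_cols is rejected.
try:
    SnapshotConfig(strategy="check", unique_key="id").final_validate()
except ValidationError as exc:
    print(exc)

# snapshot_meta_column_names overrides flow through snapshot_table_column_names,
# falling back to the standard dbt_* names for anything left unset.
renamed = SnapshotConfig(
    strategy="timestamp",
    unique_key="id",
    updated_at="updated_at",
    snapshot_meta_column_names=SnapshotMetaColumnNames(dbt_valid_from="valid_from"),
)
print(renamed.snapshot_table_column_names["dbt_valid_from"])  # valid_from
print(renamed.snapshot_table_column_names["dbt_scd_id"])      # dbt_scd_id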
dbt/artifacts/resources/v1/source_definition.py
@@ -0,0 +1,84 @@
+import time
+from dataclasses import dataclass, field
+from typing import Any, Dict, List, Literal, Optional, Union
+
+from dbt.artifacts.resources.base import GraphResource
+from dbt.artifacts.resources.types import NodeType
+from dbt.artifacts.resources.v1.components import (
+    ColumnInfo,
+    FreshnessThreshold,
+    HasRelationMetadata,
+    Quoting,
+)
+from dbt.artifacts.resources.v1.config import BaseConfig, MergeBehavior
+from dbt_common.contracts.config.properties import AdditionalPropertiesAllowed
+from dbt_common.contracts.util import Mergeable
+from dbt_common.exceptions import CompilationError
+
+
+@dataclass
+class SourceConfig(BaseConfig):
+    enabled: bool = True
+    event_time: Any = None
+    freshness: Optional[FreshnessThreshold] = field(default_factory=FreshnessThreshold)
+    loaded_at_field: Optional[str] = None
+    loaded_at_query: Optional[str] = None
+    meta: Dict[str, Any] = field(default_factory=dict, metadata=MergeBehavior.Update.meta())
+    tags: List[str] = field(default_factory=list)
+
+
+@dataclass
+class ExternalPartition(AdditionalPropertiesAllowed):
+    name: str = ""
+    description: str = ""
+    data_type: str = ""
+    meta: Dict[str, Any] = field(default_factory=dict)
+
+    def __post_init__(self):
+        if self.name == "" or self.data_type == "":
+            raise CompilationError("External partition columns must have names and data types")
+
+
+@dataclass
+class ExternalTable(AdditionalPropertiesAllowed, Mergeable):
+    location: Optional[str] = None
+    file_format: Optional[str] = None
+    row_format: Optional[str] = None
+    tbl_properties: Optional[str] = None
+    partitions: Optional[Union[List[str], List[ExternalPartition]]] = None
+
+    def __bool__(self):
+        return self.location is not None
+
+
+@dataclass
+class ParsedSourceMandatory(GraphResource, HasRelationMetadata):
+    source_name: str
+    source_description: str
+    loader: str
+    identifier: str
+    resource_type: Literal[NodeType.Source]
+
+
+@dataclass
+class SourceDefinition(ParsedSourceMandatory):
+    quoting: Quoting = field(default_factory=Quoting)
+    loaded_at_field: Optional[str] = None
+    loaded_at_query: Optional[str] = None
+    freshness: Optional[FreshnessThreshold] = None
+    external: Optional[ExternalTable] = None
+    description: str = ""
+    columns: Dict[str, ColumnInfo] = field(default_factory=dict)
+    meta: Dict[str, Any] = field(default_factory=dict)
+    source_meta: Dict[str, Any] = field(default_factory=dict)
+    tags: List[str] = field(default_factory=list)
+    config: SourceConfig = field(default_factory=SourceConfig)
+    patch_path: Optional[str] = None
+    unrendered_config: Dict[str, Any] = field(default_factory=dict)
+    relation_name: Optional[str] = None
+    created_at: float = field(default_factory=lambda: time.time())
+    unrendered_database: Optional[str] = None
+    unrendered_schema: Optional[str] = None
+    doc_blocks: List[str] = field(default_factory=list)
+    # DVT: Connection name from profiles.yml for this source
+    connection: Optional[str] = None
+
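Besides the stock dbt fields, SourceDefinition gains a DVT-specific optional connection name that points at an entry in profiles.yml. The sketch below only exercises the small external-table helpers shown above; it is not part of the package, it assumes dvt-core 0.52.2 is importable, and the location and column values are illustrative.

from dbt.artifacts.resources.v1.source_definition import ExternalPartition, ExternalTable
from dbt_common.exceptions import CompilationError

# ExternalTable is falsy until a location is set, which is how callers detect
# whether a source actually declares an external table.
print(bool(ExternalTable()))                            # False
print(bool(ExternalTable(location="s3://bucket/raw")))  # True

# ExternalPartition validates itself on construction: name and data_type are required.
try:
    ExternalPartition(name="ds")  # missing data_type
except CompilationError as exc:
    print(exc)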
dbt/artifacts/resources/v1/sql_operation.py
@@ -0,0 +1,10 @@
+from dataclasses import dataclass
+from typing import Literal
+
+from dbt.artifacts.resources.types import NodeType
+from dbt.artifacts.resources.v1.components import CompiledResource
+
+
+@dataclass
+class SqlOperation(CompiledResource):
+    resource_type: Literal[NodeType.SqlOperation]
dbt/artifacts/resources/v1/unit_test_definition.py
@@ -0,0 +1,77 @@
+import time
+from dataclasses import dataclass, field
+from typing import Any, Dict, List, Optional, Sequence, Union
+
+from dbt.artifacts.resources import DependsOn, NodeVersion
+from dbt.artifacts.resources.base import GraphResource
+from dbt.artifacts.resources.v1.config import list_str, metas
+from dbt_common.contracts.config.base import BaseConfig, CompareBehavior, MergeBehavior
+from dbt_common.contracts.config.metadata import ShowBehavior
+from dbt_common.dataclass_schema import StrEnum, dbtClassMixin
+
+
+@dataclass
+class UnitTestConfig(BaseConfig):
+    tags: Union[str, List[str]] = field(
+        default_factory=list_str,
+        metadata=metas(ShowBehavior.Hide, MergeBehavior.Append, CompareBehavior.Exclude),
+    )
+    meta: Dict[str, Any] = field(
+        default_factory=dict,
+        metadata=MergeBehavior.Update.meta(),
+    )
+    enabled: bool = True
+
+
+class UnitTestFormat(StrEnum):
+    CSV = "csv"
+    Dict = "dict"
+    SQL = "sql"
+
+
+@dataclass
+class UnitTestInputFixture(dbtClassMixin):
+    input: str
+    rows: Optional[Union[str, List[Dict[str, Any]]]] = None
+    format: UnitTestFormat = UnitTestFormat.Dict
+    fixture: Optional[str] = None
+
+
+@dataclass
+class UnitTestOverrides(dbtClassMixin):
+    macros: Dict[str, Any] = field(default_factory=dict)
+    vars: Dict[str, Any] = field(default_factory=dict)
+    env_vars: Dict[str, Any] = field(default_factory=dict)
+
+
+@dataclass
+class UnitTestNodeVersions(dbtClassMixin):
+    include: Optional[List[NodeVersion]] = None
+    exclude: Optional[List[NodeVersion]] = None
+
+
+@dataclass
+class UnitTestOutputFixture(dbtClassMixin):
+    rows: Optional[Union[str, List[Dict[str, Any]]]] = None
+    format: UnitTestFormat = UnitTestFormat.Dict
+    fixture: Optional[str] = None
+
+
+@dataclass
+class UnitTestDefinitionMandatory:
+    model: str
+    given: Sequence[UnitTestInputFixture]
+    expect: UnitTestOutputFixture
+
+
+@dataclass
+class UnitTestDefinition(GraphResource, UnitTestDefinitionMandatory):
+    description: str = ""
+    overrides: Optional[UnitTestOverrides] = None
+    depends_on: DependsOn = field(default_factory=DependsOn)
+    config: UnitTestConfig = field(default_factory=UnitTestConfig)
+    checksum: Optional[str] = None
+    schema: Optional[str] = None
+    created_at: float = field(default_factory=lambda: time.time())
+    versions: Optional[UnitTestNodeVersions] = None
+    version: Optional[NodeVersion] = None
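A short sketch of how the fixture dataclasses above compose into a unit test's given/expect inputs; it is not part of the package, it assumes dvt-core 0.52.2 is importable, and the model references and rows are made up.

from dbt.artifacts.resources.v1.unit_test_definition import (
    UnitTestFormat,
    UnitTestInputFixture,
    UnitTestOutputFixture,
)

# Inline rows default to the Dict format; a csv fixture file is referenced by name instead.
given = [
    UnitTestInputFixture(input="ref('stg_orders')", rows=[{"order_id": 1, "status": "open"}]),
    UnitTestInputFixture(input="ref('stg_payments')", format=UnitTestFormat.CSV, fixture="payments_fixture"),
]
expect = UnitTestOutputFixture(rows=[{"order_id": 1, "amount": 10}])

print(given[1].format is UnitTestFormat.CSV)  # True
print(expect.fixture is None)                 # True: inline rows, no fixture file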
dbt/artifacts/schemas/__init__.py
File without changes
dbt/artifacts/schemas/base.py
@@ -0,0 +1,191 @@
+import dataclasses
+import functools
+from datetime import datetime, timezone
+from typing import Any, ClassVar, Dict, Optional, Type, TypeVar
+
+from mashumaro.jsonschema import build_json_schema
+from mashumaro.jsonschema.dialects import DRAFT_2020_12
+
+from dbt.artifacts.exceptions import IncompatibleSchemaError
+from dbt.version import __version__
+from dbt_common.clients.system import read_json, write_json
+from dbt_common.dataclass_schema import dbtClassMixin
+from dbt_common.events.functions import get_metadata_vars
+from dbt_common.exceptions import DbtInternalError, DbtRuntimeError
+from dbt_common.invocation import get_invocation_id, get_invocation_started_at
+
+BASE_SCHEMAS_URL = "https://schemas.getdbt.com/"
+SCHEMA_PATH = "dbt/{name}/v{version}.json"
+
+
+@dataclasses.dataclass
+class SchemaVersion:
+    name: str
+    version: int
+
+    @property
+    def path(self) -> str:
+        return SCHEMA_PATH.format(name=self.name, version=self.version)
+
+    def __str__(self) -> str:
+        return BASE_SCHEMAS_URL + self.path
+
+
+class Writable:
+    def write(self, path: str):
+        write_json(path, self.to_dict(omit_none=False, context={"artifact": True}))  # type: ignore
+
+
+class Readable:
+    @classmethod
+    def read(cls, path: str):
+        try:
+            data = read_json(path)
+        except (EnvironmentError, ValueError) as exc:
+            raise DbtRuntimeError(
+                f'Could not read {cls.__name__} at "{path}" as JSON: {exc}'
+            ) from exc
+
+        return cls.from_dict(data)  # type: ignore
+
+
+# This is used in the ManifestMetadata, RunResultsMetadata, RunOperationResultMetadata,
+# FreshnessMetadata, and CatalogMetadata classes
+@dataclasses.dataclass
+class BaseArtifactMetadata(dbtClassMixin):
+    dbt_schema_version: str
+    dbt_version: str = __version__
+    generated_at: datetime = dataclasses.field(
+        default_factory=lambda: datetime.now(timezone.utc).replace(tzinfo=None)
+    )
+    invocation_id: Optional[str] = dataclasses.field(default_factory=get_invocation_id)
+    invocation_started_at: Optional[datetime] = dataclasses.field(
+        default_factory=get_invocation_started_at
+    )
+    env: Dict[str, str] = dataclasses.field(default_factory=get_metadata_vars)
+
+    def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None):
+        dct = super().__post_serialize__(dct, context)
+        if dct["generated_at"] and dct["generated_at"].endswith("+00:00"):
+            dct["generated_at"] = dct["generated_at"].replace("+00:00", "") + "Z"
+        return dct
+
+
+# This is used as a class decorator to set the schema_version in the
+# 'dbt_schema_version' class attribute. (It's copied into the metadata objects.)
+# Name attributes of SchemaVersion in classes with the 'schema_version' decorator:
+#   manifest
+#   run-results
+#   run-operation-result
+#   sources
+#   catalog
+#   remote-compile-result
+#   remote-execution-result
+#   remote-run-result
+S = TypeVar("S", bound="VersionedSchema")
+
+
+def schema_version(name: str, version: int):
+    def inner(cls: Type[S]):
+        cls.dbt_schema_version = SchemaVersion(
+            name=name,
+            version=version,
+        )
+        return cls
+
+    return inner
+
+
+# This is used in the ArtifactMixin and RemoteCompileResultMixin classes
+@dataclasses.dataclass
+class VersionedSchema(dbtClassMixin):
+    dbt_schema_version: ClassVar[SchemaVersion]
+
+    @classmethod
+    @functools.lru_cache
+    def json_schema(cls) -> Dict[str, Any]:
+        json_schema_obj = build_json_schema(cls, dialect=DRAFT_2020_12, with_dialect_uri=True)
+        json_schema = json_schema_obj.to_dict()
+        json_schema["$id"] = str(cls.dbt_schema_version)
+        return json_schema
+
+    @classmethod
+    def is_compatible_version(cls, schema_version):
+        compatible_versions = [str(cls.dbt_schema_version)]
+        if hasattr(cls, "compatible_previous_versions"):
+            for name, version in cls.compatible_previous_versions():
+                compatible_versions.append(str(SchemaVersion(name, version)))
+        return str(schema_version) in compatible_versions
+
+    @classmethod
+    def read_and_check_versions(cls, path: str):
+        try:
+            data = read_json(path)
+        except (EnvironmentError, ValueError) as exc:
+            raise DbtRuntimeError(
+                f'Could not read {cls.__name__} at "{path}" as JSON: {exc}'
+            ) from exc
+
+        # Check metadata version. There is a class variable 'dbt_schema_version', but
+        # that doesn't show up in artifacts, where it only exists in the 'metadata'
+        # dictionary.
+        if hasattr(cls, "dbt_schema_version"):
+            if "metadata" in data and "dbt_schema_version" in data["metadata"]:
+                previous_schema_version = data["metadata"]["dbt_schema_version"]
+                # cls.dbt_schema_version is a SchemaVersion object
+                if not cls.is_compatible_version(previous_schema_version):
+                    raise IncompatibleSchemaError(
+                        expected=str(cls.dbt_schema_version),
+                        found=previous_schema_version,
+                    )
+
+        return cls.upgrade_schema_version(data)
+
+    @classmethod
+    def upgrade_schema_version(cls, data):
+        """This will modify the data (dictionary) passed in to match the current
+        artifact schema code, if necessary. This is the default method, which
+        just returns the instantiated object via from_dict."""
+        return cls.from_dict(data)
+
+
+T = TypeVar("T", bound="ArtifactMixin")
+
+
+# metadata should really be a Generic[T_M] where T_M is a TypeVar bound to
+# BaseArtifactMetadata. Unfortunately this isn't possible due to a mypy issue:
+# https://github.com/python/mypy/issues/7520
+# This is used in the WritableManifest, RunResultsArtifact, RunOperationResultsArtifact,
+# and CatalogArtifact
+@dataclasses.dataclass(init=False)
+class ArtifactMixin(VersionedSchema, Writable, Readable):
+    metadata: BaseArtifactMetadata
+
+    @classmethod
+    def validate(cls, data):
+        super().validate(data)
+        if cls.dbt_schema_version is None:
+            raise DbtInternalError("Cannot call from_dict with no schema version!")
+
+
+def get_artifact_schema_version(dct: dict) -> int:
+    schema_version = dct.get("metadata", {}).get("dbt_schema_version", None)
+    if not schema_version:
+        raise ValueError("Artifact is missing schema version")
+
+    # schema_version is in this format: https://schemas.getdbt.com/dbt/manifest/v10.json
+    # What the code below is doing:
+    # 1. Split on "/" – v10.json
+    # 2. Split on "." – v10
+    # 3. Skip first character – 10
+    # 4. Convert to int
+    # TODO: If this gets more complicated, turn into a regex
+    return int(schema_version.split("/")[-1].split(".")[0][1:])
+
+
+def get_artifact_dbt_version(dct: dict) -> Optional[str]:
+    dbt_version = dct.get("metadata", {}).get("dbt_version", None)
+    if dbt_version is None:
+        return None
+
+    return str(dbt_version)
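A minimal sketch of how the versioning helpers above fit together; it is not part of the package, it assumes dvt-core 0.52.2 is importable, and the manifest/v12 values are illustrative.

from dbt.artifacts.schemas.base import SchemaVersion, get_artifact_schema_version

# SchemaVersion renders the public schema URL that becomes the artifact's $id
# and the metadata's dbt_schema_version string.
sv = SchemaVersion(name="manifest", version=12)
print(str(sv))  # https://schemas.getdbt.com/dbt/manifest/v12.json

# get_artifact_schema_version parses the version number back out of an
# artifact dictionary's metadata (split on "/", split on ".", drop the "v").
artifact_dict = {"metadata": {"dbt_schema_version": str(sv)}}
print(get_artifact_schema_version(artifact_dict))  # 12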
dbt/artifacts/schemas/batch_results.py
@@ -0,0 +1,24 @@
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+from datetime import datetime
+from typing import List, Tuple
+
+from dbt_common.dataclass_schema import dbtClassMixin
+
+BatchType = Tuple[datetime, datetime]
+
+
+@dataclass
+class BatchResults(dbtClassMixin):
+    successful: List[BatchType] = field(default_factory=list)
+    failed: List[BatchType] = field(default_factory=list)
+
+    def __add__(self, other: BatchResults) -> BatchResults:
+        return BatchResults(
+            successful=self.successful + other.successful,
+            failed=self.failed + other.failed,
+        )
+
+    def __len__(self):
+        return len(self.successful) + len(self.failed)
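A small sketch of combining batch results with the operators defined above; it is not part of the package, it assumes dvt-core 0.52.2 is importable, and the batch windows are illustrative.

from datetime import datetime

from dbt.artifacts.schemas.batch_results import BatchResults

day1 = (datetime(2024, 1, 1), datetime(2024, 1, 2))
day2 = (datetime(2024, 1, 2), datetime(2024, 1, 3))

first = BatchResults(successful=[day1])
second = BatchResults(failed=[day2])

combined = first + second          # __add__ concatenates both lists
print(len(combined))               # 2, via __len__
print(combined.failed == [day2])   # True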
dbt/artifacts/schemas/catalog/v1/__init__.py
File without changes
dbt/artifacts/schemas/catalog/v1/catalog.py
@@ -0,0 +1,59 @@
+from dataclasses import dataclass, field
+from datetime import datetime
+from typing import Any, Dict, List, Optional, Union
+
+from dbt.artifacts.schemas.base import (
+    ArtifactMixin,
+    BaseArtifactMetadata,
+    schema_version,
+)
+from dbt_common.contracts.metadata import CatalogTable
+from dbt_common.dataclass_schema import dbtClassMixin
+
+Primitive = Union[bool, str, float, None]
+PrimitiveDict = Dict[str, Primitive]
+
+
+@dataclass
+class CatalogMetadata(BaseArtifactMetadata):
+    dbt_schema_version: str = field(
+        default_factory=lambda: str(CatalogArtifact.dbt_schema_version)
+    )
+
+
+@dataclass
+class CatalogResults(dbtClassMixin):
+    nodes: Dict[str, CatalogTable]
+    sources: Dict[str, CatalogTable]
+    errors: Optional[List[str]] = None
+    _compile_results: Optional[Any] = None
+
+    def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None):
+        dct = super().__post_serialize__(dct, context)
+        if "_compile_results" in dct:
+            del dct["_compile_results"]
+        return dct
+
+
+@dataclass
+@schema_version("catalog", 1)
+class CatalogArtifact(CatalogResults, ArtifactMixin):
+    metadata: CatalogMetadata
+
+    @classmethod
+    def from_results(
+        cls,
+        generated_at: datetime,
+        nodes: Dict[str, CatalogTable],
+        sources: Dict[str, CatalogTable],
+        compile_results: Optional[Any],
+        errors: Optional[List[str]],
+    ) -> "CatalogArtifact":
+        meta = CatalogMetadata(generated_at=generated_at)
+        return cls(
+            metadata=meta,
+            nodes=nodes,
+            sources=sources,
+            errors=errors,
+            _compile_results=compile_results,
+        )
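A minimal sketch of building a catalog artifact via from_results; it is not part of the package, it assumes dvt-core 0.52.2 is importable, and it uses empty node/source maps for brevity.

from datetime import datetime, timezone

from dbt.artifacts.schemas.catalog.v1.catalog import CatalogArtifact

artifact = CatalogArtifact.from_results(
    generated_at=datetime.now(timezone.utc).replace(tzinfo=None),
    nodes={},
    sources={},
    compile_results=None,
    errors=None,
)

# The schema_version decorator pins the schema URL, and CatalogMetadata copies
# it into the artifact's metadata by default.
print(str(CatalogArtifact.dbt_schema_version))  # https://schemas.getdbt.com/dbt/catalog/v1.json
print(artifact.metadata.dbt_schema_version)     # same URL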
dbt/artifacts/schemas/freshness/__init__.py
@@ -0,0 +1 @@
+from dbt.artifacts.schemas.freshness.v3.freshness import *  # noqa
dbt/artifacts/schemas/freshness/v3/__init__.py
File without changes