dvt-core 0.52.2__cp310-cp310-macosx_10_9_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dbt/__init__.py +7 -0
- dbt/_pydantic_shim.py +26 -0
- dbt/artifacts/__init__.py +0 -0
- dbt/artifacts/exceptions/__init__.py +1 -0
- dbt/artifacts/exceptions/schemas.py +31 -0
- dbt/artifacts/resources/__init__.py +116 -0
- dbt/artifacts/resources/base.py +67 -0
- dbt/artifacts/resources/types.py +93 -0
- dbt/artifacts/resources/v1/analysis.py +10 -0
- dbt/artifacts/resources/v1/catalog.py +23 -0
- dbt/artifacts/resources/v1/components.py +274 -0
- dbt/artifacts/resources/v1/config.py +277 -0
- dbt/artifacts/resources/v1/documentation.py +11 -0
- dbt/artifacts/resources/v1/exposure.py +51 -0
- dbt/artifacts/resources/v1/function.py +52 -0
- dbt/artifacts/resources/v1/generic_test.py +31 -0
- dbt/artifacts/resources/v1/group.py +21 -0
- dbt/artifacts/resources/v1/hook.py +11 -0
- dbt/artifacts/resources/v1/macro.py +29 -0
- dbt/artifacts/resources/v1/metric.py +172 -0
- dbt/artifacts/resources/v1/model.py +145 -0
- dbt/artifacts/resources/v1/owner.py +10 -0
- dbt/artifacts/resources/v1/saved_query.py +111 -0
- dbt/artifacts/resources/v1/seed.py +41 -0
- dbt/artifacts/resources/v1/semantic_layer_components.py +72 -0
- dbt/artifacts/resources/v1/semantic_model.py +314 -0
- dbt/artifacts/resources/v1/singular_test.py +14 -0
- dbt/artifacts/resources/v1/snapshot.py +91 -0
- dbt/artifacts/resources/v1/source_definition.py +84 -0
- dbt/artifacts/resources/v1/sql_operation.py +10 -0
- dbt/artifacts/resources/v1/unit_test_definition.py +77 -0
- dbt/artifacts/schemas/__init__.py +0 -0
- dbt/artifacts/schemas/base.py +191 -0
- dbt/artifacts/schemas/batch_results.py +24 -0
- dbt/artifacts/schemas/catalog/__init__.py +11 -0
- dbt/artifacts/schemas/catalog/v1/__init__.py +0 -0
- dbt/artifacts/schemas/catalog/v1/catalog.py +59 -0
- dbt/artifacts/schemas/freshness/__init__.py +1 -0
- dbt/artifacts/schemas/freshness/v3/__init__.py +0 -0
- dbt/artifacts/schemas/freshness/v3/freshness.py +158 -0
- dbt/artifacts/schemas/manifest/__init__.py +2 -0
- dbt/artifacts/schemas/manifest/v12/__init__.py +0 -0
- dbt/artifacts/schemas/manifest/v12/manifest.py +211 -0
- dbt/artifacts/schemas/results.py +147 -0
- dbt/artifacts/schemas/run/__init__.py +2 -0
- dbt/artifacts/schemas/run/v5/__init__.py +0 -0
- dbt/artifacts/schemas/run/v5/run.py +184 -0
- dbt/artifacts/schemas/upgrades/__init__.py +4 -0
- dbt/artifacts/schemas/upgrades/upgrade_manifest.py +174 -0
- dbt/artifacts/schemas/upgrades/upgrade_manifest_dbt_version.py +2 -0
- dbt/artifacts/utils/validation.py +153 -0
- dbt/cli/__init__.py +1 -0
- dbt/cli/context.py +17 -0
- dbt/cli/exceptions.py +57 -0
- dbt/cli/flags.py +560 -0
- dbt/cli/main.py +2039 -0
- dbt/cli/option_types.py +121 -0
- dbt/cli/options.py +80 -0
- dbt/cli/params.py +804 -0
- dbt/cli/requires.py +490 -0
- dbt/cli/resolvers.py +50 -0
- dbt/cli/types.py +40 -0
- dbt/clients/__init__.py +0 -0
- dbt/clients/checked_load.py +83 -0
- dbt/clients/git.py +164 -0
- dbt/clients/jinja.py +206 -0
- dbt/clients/jinja_static.py +245 -0
- dbt/clients/registry.py +192 -0
- dbt/clients/yaml_helper.py +68 -0
- dbt/compilation.py +876 -0
- dbt/compute/__init__.py +14 -0
- dbt/compute/engines/__init__.py +12 -0
- dbt/compute/engines/spark_engine.py +624 -0
- dbt/compute/federated_executor.py +837 -0
- dbt/compute/filter_pushdown.cpython-310-darwin.so +0 -0
- dbt/compute/filter_pushdown.py +273 -0
- dbt/compute/jar_provisioning.cpython-310-darwin.so +0 -0
- dbt/compute/jar_provisioning.py +255 -0
- dbt/compute/java_compat.cpython-310-darwin.so +0 -0
- dbt/compute/java_compat.py +689 -0
- dbt/compute/jdbc_utils.cpython-310-darwin.so +0 -0
- dbt/compute/jdbc_utils.py +678 -0
- dbt/compute/smart_selector.cpython-310-darwin.so +0 -0
- dbt/compute/smart_selector.py +311 -0
- dbt/compute/strategies/__init__.py +54 -0
- dbt/compute/strategies/base.py +165 -0
- dbt/compute/strategies/dataproc.py +207 -0
- dbt/compute/strategies/emr.py +203 -0
- dbt/compute/strategies/local.py +364 -0
- dbt/compute/strategies/standalone.py +262 -0
- dbt/config/__init__.py +4 -0
- dbt/config/catalogs.py +94 -0
- dbt/config/compute.cpython-310-darwin.so +0 -0
- dbt/config/compute.py +547 -0
- dbt/config/dvt_profile.cpython-310-darwin.so +0 -0
- dbt/config/dvt_profile.py +342 -0
- dbt/config/profile.py +422 -0
- dbt/config/project.py +873 -0
- dbt/config/project_utils.py +28 -0
- dbt/config/renderer.py +231 -0
- dbt/config/runtime.py +553 -0
- dbt/config/selectors.py +208 -0
- dbt/config/utils.py +77 -0
- dbt/constants.py +28 -0
- dbt/context/__init__.py +0 -0
- dbt/context/base.py +745 -0
- dbt/context/configured.py +135 -0
- dbt/context/context_config.py +382 -0
- dbt/context/docs.py +82 -0
- dbt/context/exceptions_jinja.py +178 -0
- dbt/context/macro_resolver.py +195 -0
- dbt/context/macros.py +171 -0
- dbt/context/manifest.py +72 -0
- dbt/context/providers.py +2249 -0
- dbt/context/query_header.py +13 -0
- dbt/context/secret.py +58 -0
- dbt/context/target.py +74 -0
- dbt/contracts/__init__.py +0 -0
- dbt/contracts/files.py +413 -0
- dbt/contracts/graph/__init__.py +0 -0
- dbt/contracts/graph/manifest.py +1904 -0
- dbt/contracts/graph/metrics.py +97 -0
- dbt/contracts/graph/model_config.py +70 -0
- dbt/contracts/graph/node_args.py +42 -0
- dbt/contracts/graph/nodes.py +1806 -0
- dbt/contracts/graph/semantic_manifest.py +232 -0
- dbt/contracts/graph/unparsed.py +811 -0
- dbt/contracts/project.py +417 -0
- dbt/contracts/results.py +53 -0
- dbt/contracts/selection.py +23 -0
- dbt/contracts/sql.py +85 -0
- dbt/contracts/state.py +68 -0
- dbt/contracts/util.py +46 -0
- dbt/deprecations.py +346 -0
- dbt/deps/__init__.py +0 -0
- dbt/deps/base.py +152 -0
- dbt/deps/git.py +195 -0
- dbt/deps/local.py +79 -0
- dbt/deps/registry.py +130 -0
- dbt/deps/resolver.py +149 -0
- dbt/deps/tarball.py +120 -0
- dbt/docs/source/_ext/dbt_click.py +119 -0
- dbt/docs/source/conf.py +32 -0
- dbt/env_vars.py +64 -0
- dbt/event_time/event_time.py +40 -0
- dbt/event_time/sample_window.py +60 -0
- dbt/events/__init__.py +15 -0
- dbt/events/base_types.py +36 -0
- dbt/events/core_types_pb2.py +2 -0
- dbt/events/logging.py +108 -0
- dbt/events/types.py +2516 -0
- dbt/exceptions.py +1486 -0
- dbt/flags.py +89 -0
- dbt/graph/__init__.py +11 -0
- dbt/graph/cli.py +247 -0
- dbt/graph/graph.py +172 -0
- dbt/graph/queue.py +214 -0
- dbt/graph/selector.py +374 -0
- dbt/graph/selector_methods.py +975 -0
- dbt/graph/selector_spec.py +222 -0
- dbt/graph/thread_pool.py +18 -0
- dbt/hooks.py +21 -0
- dbt/include/README.md +49 -0
- dbt/include/__init__.py +3 -0
- dbt/include/starter_project/.gitignore +4 -0
- dbt/include/starter_project/README.md +15 -0
- dbt/include/starter_project/__init__.py +3 -0
- dbt/include/starter_project/analyses/.gitkeep +0 -0
- dbt/include/starter_project/dbt_project.yml +36 -0
- dbt/include/starter_project/macros/.gitkeep +0 -0
- dbt/include/starter_project/models/example/my_first_dbt_model.sql +27 -0
- dbt/include/starter_project/models/example/my_second_dbt_model.sql +6 -0
- dbt/include/starter_project/models/example/schema.yml +21 -0
- dbt/include/starter_project/seeds/.gitkeep +0 -0
- dbt/include/starter_project/snapshots/.gitkeep +0 -0
- dbt/include/starter_project/tests/.gitkeep +0 -0
- dbt/internal_deprecations.py +26 -0
- dbt/jsonschemas/__init__.py +3 -0
- dbt/jsonschemas/jsonschemas.py +309 -0
- dbt/jsonschemas/project/0.0.110.json +4717 -0
- dbt/jsonschemas/project/0.0.85.json +2015 -0
- dbt/jsonschemas/resources/0.0.110.json +2636 -0
- dbt/jsonschemas/resources/0.0.85.json +2536 -0
- dbt/jsonschemas/resources/latest.json +6773 -0
- dbt/links.py +4 -0
- dbt/materializations/__init__.py +0 -0
- dbt/materializations/incremental/__init__.py +0 -0
- dbt/materializations/incremental/microbatch.py +236 -0
- dbt/mp_context.py +8 -0
- dbt/node_types.py +37 -0
- dbt/parser/__init__.py +23 -0
- dbt/parser/analysis.py +21 -0
- dbt/parser/base.py +548 -0
- dbt/parser/common.py +266 -0
- dbt/parser/docs.py +52 -0
- dbt/parser/fixtures.py +51 -0
- dbt/parser/functions.py +30 -0
- dbt/parser/generic_test.py +100 -0
- dbt/parser/generic_test_builders.py +333 -0
- dbt/parser/hooks.py +118 -0
- dbt/parser/macros.py +137 -0
- dbt/parser/manifest.py +2204 -0
- dbt/parser/models.py +573 -0
- dbt/parser/partial.py +1178 -0
- dbt/parser/read_files.py +445 -0
- dbt/parser/schema_generic_tests.py +422 -0
- dbt/parser/schema_renderer.py +111 -0
- dbt/parser/schema_yaml_readers.py +935 -0
- dbt/parser/schemas.py +1466 -0
- dbt/parser/search.py +149 -0
- dbt/parser/seeds.py +28 -0
- dbt/parser/singular_test.py +20 -0
- dbt/parser/snapshots.py +44 -0
- dbt/parser/sources.py +558 -0
- dbt/parser/sql.py +62 -0
- dbt/parser/unit_tests.py +621 -0
- dbt/plugins/__init__.py +20 -0
- dbt/plugins/contracts.py +9 -0
- dbt/plugins/exceptions.py +2 -0
- dbt/plugins/manager.py +163 -0
- dbt/plugins/manifest.py +21 -0
- dbt/profiler.py +20 -0
- dbt/py.typed +1 -0
- dbt/query_analyzer.cpython-310-darwin.so +0 -0
- dbt/query_analyzer.py +410 -0
- dbt/runners/__init__.py +2 -0
- dbt/runners/exposure_runner.py +7 -0
- dbt/runners/no_op_runner.py +45 -0
- dbt/runners/saved_query_runner.py +7 -0
- dbt/selected_resources.py +8 -0
- dbt/task/__init__.py +0 -0
- dbt/task/base.py +503 -0
- dbt/task/build.py +197 -0
- dbt/task/clean.py +56 -0
- dbt/task/clone.py +161 -0
- dbt/task/compile.py +150 -0
- dbt/task/compute.py +454 -0
- dbt/task/debug.py +505 -0
- dbt/task/deps.py +280 -0
- dbt/task/docs/__init__.py +3 -0
- dbt/task/docs/generate.py +660 -0
- dbt/task/docs/index.html +250 -0
- dbt/task/docs/serve.py +29 -0
- dbt/task/freshness.py +322 -0
- dbt/task/function.py +121 -0
- dbt/task/group_lookup.py +46 -0
- dbt/task/init.py +553 -0
- dbt/task/java.py +316 -0
- dbt/task/list.py +236 -0
- dbt/task/printer.py +175 -0
- dbt/task/retry.py +175 -0
- dbt/task/run.py +1306 -0
- dbt/task/run_operation.py +141 -0
- dbt/task/runnable.py +758 -0
- dbt/task/seed.py +103 -0
- dbt/task/show.py +149 -0
- dbt/task/snapshot.py +56 -0
- dbt/task/spark.py +414 -0
- dbt/task/sql.py +110 -0
- dbt/task/target_sync.py +759 -0
- dbt/task/test.py +464 -0
- dbt/tests/fixtures/__init__.py +1 -0
- dbt/tests/fixtures/project.py +620 -0
- dbt/tests/util.py +651 -0
- dbt/tracking.py +529 -0
- dbt/utils/__init__.py +3 -0
- dbt/utils/artifact_upload.py +151 -0
- dbt/utils/utils.py +408 -0
- dbt/version.py +268 -0
- dvt_cli/__init__.py +72 -0
- dvt_core-0.52.2.dist-info/METADATA +286 -0
- dvt_core-0.52.2.dist-info/RECORD +275 -0
- dvt_core-0.52.2.dist-info/WHEEL +5 -0
- dvt_core-0.52.2.dist-info/entry_points.txt +2 -0
- dvt_core-0.52.2.dist-info/top_level.txt +2 -0
dbt/parser/common.py
ADDED
|
@@ -0,0 +1,266 @@
|
|
|
1
|
+
from dataclasses import dataclass
|
|
2
|
+
from typing import Any, Dict, Generic, List, Optional, TypeVar, Union
|
|
3
|
+
|
|
4
|
+
from dbt.artifacts.resources import ColumnConfig, ColumnInfo, NodeVersion
|
|
5
|
+
from dbt.contracts.graph.nodes import UnpatchedSourceDefinition
|
|
6
|
+
from dbt.contracts.graph.unparsed import (
|
|
7
|
+
HasColumnDocs,
|
|
8
|
+
HasColumnProps,
|
|
9
|
+
HasColumnTests,
|
|
10
|
+
UnparsedAnalysisUpdate,
|
|
11
|
+
UnparsedColumn,
|
|
12
|
+
UnparsedExposure,
|
|
13
|
+
UnparsedFunctionUpdate,
|
|
14
|
+
UnparsedMacroUpdate,
|
|
15
|
+
UnparsedModelUpdate,
|
|
16
|
+
UnparsedNodeUpdate,
|
|
17
|
+
UnparsedSingularTestUpdate,
|
|
18
|
+
)
|
|
19
|
+
from dbt.exceptions import ParsingError
|
|
20
|
+
from dbt.node_types import NodeType
|
|
21
|
+
from dbt.parser.search import FileBlock
|
|
22
|
+
from dbt_common.contracts.constraints import ColumnLevelConstraint, ConstraintType
|
|
23
|
+
from dbt_common.exceptions import DbtInternalError
|
|
24
|
+
from dbt_semantic_interfaces.type_enums import TimeGranularity
|
|
25
|
+
|
|
26
|
+
# Top-level schema-file keys mapped to the resource type each one declares.
schema_file_keys_to_resource_types = {
    "models": NodeType.Model,
    "seeds": NodeType.Seed,
    "snapshots": NodeType.Snapshot,
    "sources": NodeType.Source,
    "macros": NodeType.Macro,
    "analyses": NodeType.Analysis,
    "exposures": NodeType.Exposure,
    "metrics": NodeType.Metric,
    "semantic_models": NodeType.SemanticModel,
    "saved_queries": NodeType.SavedQuery,
    "functions": NodeType.Function,
}

# Inverse lookup: resource type -> the schema-file key that declares it.
resource_types_to_schema_file_keys = dict(
    (resource_type, key)
    for key, resource_type in schema_file_keys_to_resource_types.items()
)

# All recognized schema-file keys, in declaration order.
schema_file_keys = [*schema_file_keys_to_resource_types]
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
def trimmed(inp: str, max_length: int = 50) -> str:
    """Shorten *inp* for display purposes.

    Strings shorter than ``max_length`` are returned unchanged; longer ones
    keep a leading slice and the last three characters, joined by ``...`` so
    the result is exactly ``max_length`` characters long.

    :param inp: the string to shorten.
    :param max_length: display budget in characters (default 50, matching
        the previous hard-coded behavior).
    """
    if len(inp) < max_length:
        return inp
    # head + "..." (3 chars) + 3-char tail == max_length characters total
    return inp[: max_length - 6] + "..." + inp[-3:]
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
# A test entry in a schema file: either a bare test name (e.g. "unique") or
# a mapping of test name to its configuration.
TestDef = Union[str, Dict[str, Any]]


# Any unparsed schema-file entry that a TargetBlock can wrap.
Target = TypeVar(
    "Target",
    UnparsedNodeUpdate,
    UnparsedMacroUpdate,
    UnparsedAnalysisUpdate,
    UnpatchedSourceDefinition,
    UnparsedExposure,
    UnparsedModelUpdate,
    UnparsedFunctionUpdate,
    UnparsedSingularTestUpdate,
)


# The subset of targets that can carry a `columns` section
# (see TargetColumnsBlock below).
ColumnTarget = TypeVar(
    "ColumnTarget",
    UnparsedModelUpdate,
    UnparsedNodeUpdate,
    UnparsedAnalysisUpdate,
    UnpatchedSourceDefinition,
)

# Targets that support versioned definitions; bound to models, which are the
# only versioned resource here.
Versioned = TypeVar("Versioned", bound=UnparsedModelUpdate)

# Targets that tests can be attached to (see TestBlock below).
Testable = TypeVar("Testable", UnparsedNodeUpdate, UnpatchedSourceDefinition, UnparsedModelUpdate)
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
@dataclass
class YamlBlock(FileBlock):
    """A FileBlock paired with the parsed YAML payload of that file."""

    data: Dict[str, Any]

    @classmethod
    def from_file_block(cls, src: FileBlock, data: Dict[str, Any]):
        """Build a YamlBlock from an existing FileBlock plus its parsed data."""
        return cls(file=src.file, data=data)
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
@dataclass
class TargetBlock(YamlBlock, Generic[Target]):
    """A YamlBlock narrowed to a single unparsed entry (the *target*).

    The column/test accessors default to empty here; subclasses override
    them for targets that actually carry those sections.
    """

    target: Target

    @property
    def name(self):
        """Name of the wrapped target entry."""
        return self.target.name

    @property
    def columns(self):
        # No column definitions on a plain target.
        return []

    @property
    def data_tests(self) -> List[TestDef]:
        # No data tests on a plain target.
        return []

    @property
    def tests(self) -> List[TestDef]:
        # No tests on a plain target.
        return []

    @classmethod
    def from_yaml_block(cls, src: YamlBlock, target: Target) -> "TargetBlock[Target]":
        """Narrow a YamlBlock down to one of the entries it contains."""
        return cls(file=src.file, data=src.data, target=target)
|
|
121
|
+
|
|
122
|
+
|
|
123
|
+
@dataclass
class TargetColumnsBlock(TargetBlock[ColumnTarget], Generic[ColumnTarget]):
    """A TargetBlock whose target may define columns."""

    @property
    def columns(self):
        # Normalize a missing columns section to an empty list.
        return [] if self.target.columns is None else self.target.columns
|
|
131
|
+
|
|
132
|
+
|
|
133
|
+
@dataclass
class TestBlock(TargetColumnsBlock[Testable], Generic[Testable]):
    """A columns-bearing block whose target may also define data tests."""

    @property
    def data_tests(self) -> List[TestDef]:
        # Normalize a missing data_tests section to an empty list.
        return [] if self.target.data_tests is None else self.target.data_tests

    @property
    def quote_columns(self) -> Optional[bool]:
        """The target's column-quoting setting, if any."""
        return self.target.quote_columns

    @classmethod
    def from_yaml_block(cls, src: YamlBlock, target: Testable) -> "TestBlock[Testable]":
        """Narrow a YamlBlock down to a single testable target."""
        return cls(file=src.file, data=src.data, target=target)
|
|
153
|
+
|
|
154
|
+
|
|
155
|
+
@dataclass
class VersionedTestBlock(TestBlock, Generic[Versioned]):
    """A TestBlock for targets that may declare versions.

    Direct column/test access is only valid when the target declares no
    versions; otherwise access is an internal error and the caller must
    expand the block per version.
    """

    @property
    def columns(self):
        if self.target.versions:
            raise DbtInternalError(".columns for VersionedTestBlock with versions")
        return super().columns

    @property
    def data_tests(self) -> List[TestDef]:
        if self.target.versions:
            raise DbtInternalError(".data_tests for VersionedTestBlock with versions")
        return super().data_tests

    @classmethod
    def from_yaml_block(cls, src: YamlBlock, target: Versioned) -> "VersionedTestBlock[Versioned]":
        """Narrow a YamlBlock down to a single (possibly versioned) target."""
        return cls(file=src.file, data=src.data, target=target)
|
|
178
|
+
|
|
179
|
+
|
|
180
|
+
@dataclass
class GenericTestBlock(TestBlock[Testable], Generic[Testable]):
    """A TestBlock specialized to one generic test applied to one target."""

    data_test: Dict[str, Any]  # raw test definition from the schema file
    column_name: Optional[str]  # column the test applies to, if any
    tags: List[str]
    version: Optional[NodeVersion]  # target version under test, if versioned

    @classmethod
    def from_test_block(
        cls,
        src: TestBlock,
        data_test: Dict[str, Any],
        column_name: Optional[str],
        tags: List[str],
        version: Optional[NodeVersion],
    ) -> "GenericTestBlock":
        """Attach a single generic test definition to an existing TestBlock."""
        kwargs = dict(
            file=src.file,
            data=src.data,
            target=src.target,
            data_test=data_test,
            column_name=column_name,
            tags=tags,
            version=version,
        )
        return cls(**kwargs)
|
|
205
|
+
|
|
206
|
+
|
|
207
|
+
class ParserRef:
    """A helper object to hold parse-time references.

    Accumulates one ``ColumnInfo`` entry per documented column, merging
    column-level and column-config-level ``meta`` and ``tags``.
    """

    def __init__(self) -> None:
        # column name -> parsed column info
        self.column_info: Dict[str, ColumnInfo] = {}

    def _add(self, column: HasColumnProps) -> None:
        """Validate *column*'s constraints and record its ColumnInfo.

        Raises:
            ParsingError: if any constraint lacks a valid ``type``.
        """
        tags: List[str] = getattr(column, "tags", [])
        quote: Optional[bool] = None
        granularity: Optional[TimeGranularity] = None
        if isinstance(column, UnparsedColumn):
            quote = column.quote
            granularity = TimeGranularity(column.granularity) if column.granularity else None

        # Validate every constraint, including falsy ones. The previous
        # `any(c for c in ... if bad(c))` form tested the truthiness of the
        # constraint dict itself, so an empty `{}` constraint — which has no
        # valid "type" — silently escaped validation.
        if any(
            "type" not in c or not ConstraintType.is_valid(c["type"])
            for c in column.constraints
        ):
            raise ParsingError(f"Invalid constraint type on column {column.name}")

        # Merge meta and tags from column and config; config meta keys win
        # over column meta keys on conflict.
        column_config_meta = (
            column.config["meta"] if isinstance(column.config.get("meta"), dict) else {}
        )
        column_config_tags: List[str] = []
        if "tags" in column.config:
            if isinstance(column.config["tags"], list):
                column_config_tags = column.config["tags"]
            elif isinstance(column.config["tags"], str):
                # A bare string is treated as a single tag.
                column_config_tags = [column.config["tags"]]

        column_meta = {**column.meta, **column_config_meta}
        # Deduplicate tags; note the resulting order is unspecified.
        column_tags = list(set(tags + column_config_tags))
        self.column_info[column.name] = ColumnInfo(
            name=column.name,
            description=column.description,
            data_type=column.data_type,
            constraints=[ColumnLevelConstraint.from_dict(c) for c in column.constraints],
            meta=column_meta,
            tags=column_tags,
            quote=quote,
            _extra=column.extra,
            granularity=granularity,
            config=ColumnConfig(meta=column_meta, tags=column_tags),
        )

    @classmethod
    def from_target(cls, target: Union[HasColumnDocs, HasColumnTests]) -> "ParserRef":
        """Build a ParserRef from every column on *target*."""
        refs = cls()
        for column in target.columns:
            refs._add(column)
        return refs

    @classmethod
    def from_versioned_target(cls, target: Versioned, version: NodeVersion) -> "ParserRef":
        """Build a ParserRef from the columns of one *version* of *target*."""
        refs = cls()
        for base_column in target.get_columns_for_version(version):
            refs._add(base_column)
        return refs
|
dbt/parser/docs.py
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
import re
|
|
2
|
+
from typing import Iterable, Optional
|
|
3
|
+
|
|
4
|
+
from dbt.clients.jinja import get_rendered
|
|
5
|
+
from dbt.contracts.files import SourceFile
|
|
6
|
+
from dbt.contracts.graph.nodes import Documentation
|
|
7
|
+
from dbt.node_types import NodeType
|
|
8
|
+
from dbt.parser.base import Parser
|
|
9
|
+
from dbt.parser.search import BlockContents, BlockSearcher, FileBlock
|
|
10
|
+
|
|
11
|
+
# Matches the opening of a Jinja expression ("{{") or statement ("{%") tag.
# NOTE(review): not referenced in this module's visible code — presumably
# used by callers to decide whether contents need Jinja rendering; confirm.
SHOULD_PARSE_RE = re.compile(r"{[{%]")
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class DocumentationParser(Parser[Documentation]):
    """Extracts ``docs`` blocks from source files into Documentation nodes."""

    @property
    def resource_type(self) -> NodeType:
        return NodeType.Documentation

    @classmethod
    def get_compiled_path(cls, block: FileBlock):
        return block.path.relative_path

    def generate_unique_id(self, resource_name: str, _: Optional[str] = None) -> str:
        # For consistency, use the same format for doc unique_ids
        return f"doc.{self.project.project_name}.{resource_name}"

    def parse_block(self, block: BlockContents) -> Iterable[Documentation]:
        """Render one docs block and wrap it in a Documentation node."""
        rendered = get_rendered(block.contents, {}).strip()
        return [
            Documentation(
                path=block.file.path.relative_path,
                original_file_path=block.path.original_file_path,
                package_name=self.project.project_name,
                unique_id=self.generate_unique_id(block.name),
                name=block.name,
                block_contents=rendered,
                resource_type=NodeType.Documentation,
            )
        ]

    def parse_file(self, file_block: FileBlock):
        """Find every ``docs`` block in the file and add it to the manifest."""
        assert isinstance(file_block.file, SourceFile)
        searcher: Iterable[BlockContents] = BlockSearcher(
            source=[file_block],
            allowed_blocks={"docs"},
            source_tag_factory=BlockContents,
            check_jinja=False,
        )
        for found in searcher:
            for parsed in self.parse_block(found):
                self.manifest.add_doc(file_block.file, parsed)
|
dbt/parser/fixtures.py
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
import csv
|
|
2
|
+
from io import StringIO
|
|
3
|
+
from typing import Any, Dict, List, Optional
|
|
4
|
+
|
|
5
|
+
from dbt.contracts.files import FixtureSourceFile
|
|
6
|
+
from dbt.contracts.graph.nodes import UnitTestFileFixture
|
|
7
|
+
from dbt.node_types import NodeType
|
|
8
|
+
from dbt.parser.base import Parser
|
|
9
|
+
from dbt.parser.search import FileBlock
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class FixtureParser(Parser[UnitTestFileFixture]):
    """Parses unit-test fixture files into UnitTestFileFixture nodes.

    ``.sql`` fixtures keep their raw contents; any other fixture is assumed
    to be CSV and parsed into a list of row dicts.
    """

    @property
    def resource_type(self) -> NodeType:
        return NodeType.Fixture

    @classmethod
    def get_compiled_path(cls, block: FileBlock):
        # Is this necessary?
        return block.path.relative_path

    def generate_unique_id(self, resource_name: str, _: Optional[str] = None) -> str:
        return f"fixture.{self.project.project_name}.{resource_name}"

    def parse_file(self, file_block: FileBlock):
        """Turn one fixture file into a fixture node on the manifest."""
        assert isinstance(file_block.file, FixtureSourceFile)
        unique_id = self.generate_unique_id(file_block.name)

        if file_block.file.path.relative_path.endswith(".sql"):
            # SQL fixtures are passed through verbatim.
            rows = file_block.file.contents  # type: ignore
        else:  # endswith('.csv')
            rows = self.get_rows(file_block.file.contents)  # type: ignore

        fixture = UnitTestFileFixture(
            name=file_block.name,
            path=file_block.file.path.relative_path,
            original_file_path=file_block.path.original_file_path,
            package_name=self.project.project_name,
            unique_id=unique_id,
            resource_type=NodeType.Fixture,
            rows=rows,
        )
        self.manifest.add_fixture(file_block.file, fixture)

    def get_rows(self, contents) -> List[Dict[str, Any]]:
        """Parse CSV *contents* into a list of dicts keyed by the header row."""
        # DictReader already yields one dict per data row; the previous
        # manual append loop was redundant.
        return list(csv.DictReader(StringIO(contents)))
|
dbt/parser/functions.py
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
from dbt.artifacts.resources.types import NodeType
|
|
2
|
+
from dbt.contracts.graph.nodes import FunctionNode, ManifestNode
|
|
3
|
+
from dbt.parser.base import SimpleParser
|
|
4
|
+
from dbt.parser.search import FileBlock
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class FunctionParser(SimpleParser[FileBlock, FunctionNode]):
    """Parses function files into FunctionNodes on the manifest."""

    def parse_from_dict(self, dct, validate=True) -> FunctionNode:
        """Deserialize a FunctionNode, optionally validating the dict first."""
        if validate:
            FunctionNode.validate(dct)
        return FunctionNode.from_dict(dct)

    @property
    def resource_type(self) -> NodeType:
        return NodeType.Function

    @classmethod
    def get_compiled_path(cls, block: FileBlock):
        return block.path.relative_path

    # overrides SimpleSQLParser.add_result_node
    def add_result_node(self, block: FileBlock, node: ManifestNode):
        assert isinstance(node, FunctionNode), "Got non FunctionNode in FunctionParser"
        # Route disabled functions to the manifest's disabled collection.
        if not node.config.enabled:
            self.manifest.add_disabled(block.file, node)
        else:
            self.manifest.add_function(node)

    def parse_file(self, file_block: FileBlock) -> None:
        self.parse_node(file_block)
|
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
from typing import Iterable, List
|
|
2
|
+
|
|
3
|
+
import jinja2
|
|
4
|
+
|
|
5
|
+
from dbt.contracts.files import SourceFile
|
|
6
|
+
from dbt.contracts.graph.nodes import GenericTestNode, Macro
|
|
7
|
+
from dbt.contracts.graph.unparsed import UnparsedMacro
|
|
8
|
+
from dbt.exceptions import ParsingError
|
|
9
|
+
from dbt.node_types import NodeType
|
|
10
|
+
from dbt.parser.base import BaseParser
|
|
11
|
+
from dbt.parser.search import FileBlock
|
|
12
|
+
from dbt_common.clients import jinja
|
|
13
|
+
from dbt_common.utils import MACRO_PREFIX
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class GenericTestParser(BaseParser[GenericTestNode]):
    """Parses generic test definitions ({% test %} / {% data_test %} blocks)
    into Macro nodes on the manifest."""

    @property
    def resource_type(self) -> NodeType:
        # Generic tests are stored as macros.
        return NodeType.Macro

    @classmethod
    def get_compiled_path(cls, block: FileBlock):
        return block.path.relative_path

    def create_generic_test_macro(
        self, block: jinja.BlockTag, base_node: UnparsedMacro, name: str
    ) -> Macro:
        """Build a Macro node for one extracted test block."""
        unique_id = self.generate_unique_id(name)
        macro_sql = block.full_block or ""

        return Macro(
            path=base_node.path,
            macro_sql=macro_sql,
            original_file_path=base_node.original_file_path,
            package_name=base_node.package_name,
            resource_type=base_node.resource_type,
            name=name,
            unique_id=unique_id,
        )

    def parse_unparsed_generic_test(self, base_node: UnparsedMacro) -> Iterable[Macro]:
        """Yield a Macro for each top-level `test`/`data_test` block found in
        *base_node*'s raw code.

        Raises:
            ParsingError: if block extraction or Jinja parsing fails, or if a
                block does not contain exactly one macro definition.
        """
        try:
            blocks: List[jinja.BlockTag] = [
                t
                for t in jinja.extract_toplevel_blocks(
                    base_node.raw_code,
                    allowed_blocks={"test", "data_test"},
                    collect_raw_data=False,
                )
                if isinstance(t, jinja.BlockTag)
            ]
        except ParsingError as exc:
            # Attach the node so the error message names the source, then
            # re-raise unchanged.
            exc.add_node(base_node)
            raise

        for block in blocks:
            try:
                ast = jinja.parse(block.full_block)
            except ParsingError as e:
                e.add_node(base_node)
                raise

            # generic tests are structured as macros so we want to count the number of macro blocks
            generic_test_nodes = list(ast.find_all(jinja2.nodes.Macro))

            if len(generic_test_nodes) != 1:
                # things have gone disastrously wrong, we thought we only
                # parsed one block!
                # NOTE(review): this branch also fires when ZERO macros are
                # found, in which case the "multiple" wording below is
                # misleading — consider distinguishing the two cases.
                raise ParsingError(
                    f"Found multiple generic tests in {block.full_block}, expected 1",
                    node=base_node,
                )

            generic_test_name = generic_test_nodes[0].name

            # Skip macro nodes whose name lacks the expected prefix.
            if not generic_test_name.startswith(MACRO_PREFIX):
                continue

            name: str = generic_test_name.replace(MACRO_PREFIX, "")
            node = self.create_generic_test_macro(block, base_node, name)
            yield node

    def parse_file(self, block: FileBlock):
        """Parse every generic test in *block*'s file and add each resulting
        macro to the manifest."""
        assert isinstance(block.file, SourceFile)
        source_file = block.file
        assert isinstance(source_file.contents, str)
        original_file_path = source_file.path.original_file_path

        # this is really only used for error messages
        base_node = UnparsedMacro(
            path=original_file_path,
            original_file_path=original_file_path,
            package_name=self.project.project_name,
            raw_code=source_file.contents,
            resource_type=NodeType.Macro,
            language="sql",
        )

        for node in self.parse_unparsed_generic_test(base_node):
            self.manifest.add_macro(block.file, node)
|