dagster-dbt 0.23.3__py3-none-any.whl → 0.28.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. dagster_dbt/__init__.py +41 -140
  2. dagster_dbt/asset_decorator.py +49 -230
  3. dagster_dbt/asset_specs.py +65 -0
  4. dagster_dbt/asset_utils.py +655 -338
  5. dagster_dbt/cli/app.py +44 -43
  6. dagster_dbt/cloud/__init__.py +6 -4
  7. dagster_dbt/cloud/asset_defs.py +119 -177
  8. dagster_dbt/cloud/cli.py +3 -4
  9. dagster_dbt/cloud/ops.py +9 -6
  10. dagster_dbt/cloud/resources.py +9 -4
  11. dagster_dbt/cloud/types.py +12 -7
  12. dagster_dbt/cloud/utils.py +186 -0
  13. dagster_dbt/cloud_v2/__init__.py +10 -0
  14. dagster_dbt/cloud_v2/asset_decorator.py +81 -0
  15. dagster_dbt/cloud_v2/cli_invocation.py +67 -0
  16. dagster_dbt/cloud_v2/client.py +438 -0
  17. dagster_dbt/cloud_v2/resources.py +462 -0
  18. dagster_dbt/cloud_v2/run_handler.py +229 -0
  19. dagster_dbt/cloud_v2/sensor_builder.py +254 -0
  20. dagster_dbt/cloud_v2/types.py +143 -0
  21. dagster_dbt/compat.py +107 -0
  22. dagster_dbt/components/__init__.py +0 -0
  23. dagster_dbt/components/dbt_project/__init__.py +0 -0
  24. dagster_dbt/components/dbt_project/component.py +545 -0
  25. dagster_dbt/components/dbt_project/scaffolder.py +65 -0
  26. dagster_dbt/core/__init__.py +0 -10
  27. dagster_dbt/core/dbt_cli_event.py +612 -0
  28. dagster_dbt/core/dbt_cli_invocation.py +474 -0
  29. dagster_dbt/core/dbt_event_iterator.py +399 -0
  30. dagster_dbt/core/resource.py +733 -0
  31. dagster_dbt/core/utils.py +14 -279
  32. dagster_dbt/dagster_dbt_translator.py +317 -74
  33. dagster_dbt/dbt_core_version.py +1 -0
  34. dagster_dbt/dbt_manifest.py +6 -5
  35. dagster_dbt/dbt_manifest_asset_selection.py +62 -22
  36. dagster_dbt/dbt_project.py +179 -40
  37. dagster_dbt/dbt_project_manager.py +173 -0
  38. dagster_dbt/dbt_version.py +0 -0
  39. dagster_dbt/errors.py +9 -84
  40. dagster_dbt/freshness_builder.py +147 -0
  41. dagster_dbt/include/pyproject.toml.jinja +21 -0
  42. dagster_dbt/include/scaffold/assets.py.jinja +1 -8
  43. dagster_dbt/include/scaffold/definitions.py.jinja +0 -15
  44. dagster_dbt/include/scaffold/project.py.jinja +1 -0
  45. dagster_dbt/include/setup.py.jinja +2 -3
  46. dagster_dbt/metadata_set.py +18 -0
  47. dagster_dbt/utils.py +136 -234
  48. dagster_dbt/version.py +1 -1
  49. dagster_dbt-0.28.4.dist-info/METADATA +47 -0
  50. dagster_dbt-0.28.4.dist-info/RECORD +59 -0
  51. {dagster_dbt-0.23.3.dist-info → dagster_dbt-0.28.4.dist-info}/WHEEL +1 -1
  52. {dagster_dbt-0.23.3.dist-info → dagster_dbt-0.28.4.dist-info}/entry_points.txt +3 -0
  53. {dagster_dbt-0.23.3.dist-info → dagster_dbt-0.28.4.dist-info/licenses}/LICENSE +1 -1
  54. dagster_dbt/asset_defs.py +0 -1049
  55. dagster_dbt/core/resources.py +0 -527
  56. dagster_dbt/core/resources_v2.py +0 -1542
  57. dagster_dbt/core/types.py +0 -63
  58. dagster_dbt/dbt_resource.py +0 -220
  59. dagster_dbt/include/scaffold/constants.py.jinja +0 -21
  60. dagster_dbt/ops.py +0 -134
  61. dagster_dbt/types.py +0 -22
  62. dagster_dbt-0.23.3.dist-info/METADATA +0 -31
  63. dagster_dbt-0.23.3.dist-info/RECORD +0 -43
  64. {dagster_dbt-0.23.3.dist-info → dagster_dbt-0.28.4.dist-info}/top_level.txt +0 -0
dagster_dbt/utils.py CHANGED
@@ -1,244 +1,111 @@
  from argparse import Namespace
- from typing import (
-     AbstractSet,
-     Any,
-     Callable,
-     Dict,
-     Iterator,
-     Mapping,
-     Optional,
-     Sequence,
-     Union,
- )
-
- import dateutil
- from dagster import (
-     AssetKey,
-     AssetMaterialization,
-     AssetObservation,
-     MetadataValue,
-     Output,
-     _check as check,
- )
- from dagster._core.definitions.metadata import RawMetadataValue
-
- from .types import DbtOutput
+ from collections.abc import Mapping
+ from typing import TYPE_CHECKING, AbstractSet, Any, Optional, cast  # noqa: UP035
+
+ import dagster_shared.check as check
+ import orjson
+ from dagster import AssetKey
+ from dagster._utils.names import clean_name_lower
+ from packaging import version
+
+ from dagster_dbt.compat import DBT_PYTHON_VERSION
+
+ if TYPE_CHECKING:
+     from dagster_dbt.core.resource import DbtProject

  # dbt resource types that may be considered assets
  ASSET_RESOURCE_TYPES = ["model", "seed", "snapshot"]


- def default_node_info_to_asset_key(node_info: Mapping[str, Any]) -> AssetKey:
-     return AssetKey(node_info["unique_id"].split("."))
+ clean_name = clean_name_lower


- def _resource_type(unique_id: str) -> str:
-     # returns the type of the node (e.g. model, test, snapshot)
-     return unique_id.split(".")[0]
+ def default_node_info_to_asset_key(node_info: Mapping[str, Any]) -> AssetKey:
+     return AssetKey(node_info["unique_id"].split("."))


  def dagster_name_fn(dbt_resource_props: Mapping[str, Any]) -> str:
      return dbt_resource_props["unique_id"].replace(".", "_").replace("-", "_").replace("*", "_star")


- def _node_result_to_metadata(node_result: Mapping[str, Any]) -> Mapping[str, RawMetadataValue]:
-     return {
-         "Materialization Strategy": node_result["config"]["materialized"],
-         "Database": node_result["database"],
-         "Schema": node_result["schema"],
-         "Alias": node_result["alias"],
-         "Description": node_result["description"],
-     }
-
-
- def _timing_to_metadata(timings: Sequence[Mapping[str, Any]]) -> Mapping[str, RawMetadataValue]:
-     metadata: Dict[str, RawMetadataValue] = {}
-     for timing in timings:
-         if timing["name"] == "execute":
-             desc = "Execution"
-         elif timing["name"] == "compile":
-             desc = "Compilation"
-         else:
-             continue
-
-         # dateutil does not properly expose its modules to static checkers
-         started_at = dateutil.parser.isoparse(timing["started_at"])  # type: ignore
-         completed_at = dateutil.parser.isoparse(timing["completed_at"])  # type: ignore
-         duration = completed_at - started_at
-         metadata.update(
-             {
-                 f"{desc} Started At": started_at.isoformat(timespec="seconds"),
-                 f"{desc} Completed At": started_at.isoformat(timespec="seconds"),
-                 f"{desc} Duration": duration.total_seconds(),
-             }
-         )
-     return metadata
-
-
- def result_to_events(
-     result: Mapping[str, Any],
-     docs_url: Optional[str] = None,
-     node_info_to_asset_key: Optional[Callable[[Mapping[str, Any]], AssetKey]] = None,
-     manifest_json: Optional[Mapping[str, Any]] = None,
-     extra_metadata: Optional[Mapping[str, RawMetadataValue]] = None,
-     generate_asset_outputs: bool = False,
- ) -> Iterator[Union[AssetMaterialization, AssetObservation, Output]]:
-     """This is a hacky solution that attempts to consolidate parsing many of the potential formats
-     that dbt can provide its results in. This is known to work for CLI Outputs for dbt versions 0.18+,
-     as well as RPC responses for a similar time period, but as the RPC response schema is not documented
-     nor enforced, this can become out of date easily.
-     """
-     node_info_to_asset_key = check.opt_callable_param(
-         node_info_to_asset_key, "node_info_to_asset_key", default=default_node_info_to_asset_key
-     )
-
-     # status comes from set of fields rather than "status"
-     if "fail" in result:
-         status = (
-             "fail"
-             if result.get("fail")
-             else "skip"
-             if result.get("skip")
-             else "error"
-             if result.get("error")
-             else "success"
-         )
-     else:
-         status = result["status"]
-
-     # all versions represent timing the same way
-     metadata = {"Status": status, "Execution Time (seconds)": result["execution_time"]}
-     metadata.update(_timing_to_metadata(result["timing"]))
-
-     # working with a response that contains the node block (RPC and CLI 0.18.x)
-     if "node" in result:
-         unique_id = result["node"]["unique_id"]
-         metadata.update(_node_result_to_metadata(result["node"]))
+ def select_unique_ids(
+     select: str,
+     exclude: str,
+     selector: str,
+     project: Optional["DbtProject"],
+     manifest_json: Mapping[str, Any],
+ ) -> AbstractSet[str]:
+     """Given dbt selection paramters, return the unique ids of all resources that match that selection."""
+     manifest_version = version.parse(manifest_json.get("metadata", {}).get("dbt_version", "0.0.0"))
+     # using dbt Fusion, efficient to invoke the CLI for selection
+     if manifest_version.major >= 2 and project is not None:
+         return _select_unique_ids_from_cli(select, exclude, selector, project)
+     # using dbt-core, too slow to invoke the CLI, so we use library functions instead
+     elif DBT_PYTHON_VERSION is not None:
+         return _select_unique_ids_from_manifest(select, exclude, selector, manifest_json)
      else:
-         unique_id = result["unique_id"]
-
-     if docs_url:
-         metadata["docs_url"] = MetadataValue.url(f"{docs_url}#!/model/{unique_id}")
+         # in theory, as long as dbt-core is a dependency of dagster-dbt, this can't happen, but adding
+         # this for now to be safe
+         check.failed(
+             "dbt-core is not installed and no `project` was passed to `select_unique_ids`. "
+             "This can happen if you are using the dbt Cloud integration without the dbt-core package installed."
+         )

-     if extra_metadata:
-         metadata.update(extra_metadata)

-     # if you have a manifest available, get the full node info, otherwise just populate unique_id
-     dbt_resource_props = (
-         manifest_json["nodes"][unique_id] if manifest_json else {"unique_id": unique_id}
-     )
+ def _select_unique_ids_from_cli(
+     select: str,
+     exclude: str,
+     selector: str,
+     project: "DbtProject",
+ ) -> AbstractSet[str]:
+     """Uses the available dbt CLI to list the unique ids of the selected models. This is not recommended if
+     dbt-core is available, as it will be slower than using the manifest.
+     """
+     from dagster_dbt.core.resource import DbtCliResource

-     node_resource_type = _resource_type(unique_id)
-
-     if node_resource_type in ASSET_RESOURCE_TYPES and status == "success":
-         if generate_asset_outputs:
-             yield Output(
-                 value=None,
-                 output_name=dagster_name_fn(dbt_resource_props),
-                 metadata=metadata,
-             )
-         else:
-             yield AssetMaterialization(
-                 asset_key=node_info_to_asset_key(dbt_resource_props),
-                 description=f"dbt node: {unique_id}",
-                 metadata=metadata,
-             )
-     # can only associate tests with assets if we have manifest_json available
-     elif node_resource_type == "test" and manifest_json and status != "skipped":
-         upstream_unique_ids = manifest_json["nodes"][unique_id]["depends_on"]["nodes"]
-         # tests can apply to multiple asset keys
-         for upstream_id in upstream_unique_ids:
-             # the upstream id can reference a node or a source
-             dbt_resource_props = manifest_json["nodes"].get(upstream_id) or manifest_json[
-                 "sources"
-             ].get(upstream_id)
-             if dbt_resource_props is None:
+     cmd = ["list", "--output", "json"]
+     if select and select != "fqn:*":
+         cmd.append("--select")
+         cmd.append(select)
+     if exclude:
+         cmd.append("--exclude")
+         cmd.append(exclude)
+     if selector:
+         cmd.append("--selector")
+         cmd.append(selector)
+
+     raw_events = DbtCliResource(project_dir=project).cli(cmd)._stream_stdout()  # noqa
+     unique_ids = set()
+     for event in raw_events:
+         if isinstance(event, dict):
+             try:
+                 msg = orjson.loads(event.get("info", {}).get("msg", "{}"))
+             except orjson.JSONDecodeError:
                  continue
-             upstream_asset_key = node_info_to_asset_key(dbt_resource_props)
-             yield AssetObservation(
-                 asset_key=upstream_asset_key,
-                 metadata={
-                     "Test ID": result["unique_id"],
-                     "Test Status": status,
-                     "Test Message": result.get("message") or "",
-                 },
-             )
-
-
- def generate_events(
-     dbt_output: DbtOutput,
-     node_info_to_asset_key: Optional[Callable[[Mapping[str, Any]], AssetKey]] = None,
-     manifest_json: Optional[Mapping[str, Any]] = None,
- ) -> Iterator[Union[AssetMaterialization, AssetObservation]]:
-     """This function yields :py:class:`dagster.AssetMaterialization` events for each model updated by
-     a dbt command, and :py:class:`dagster.AssetObservation` events for each test run.
-
-     Information parsed from a :py:class:`~DbtOutput` object.
-     """
-     for result in dbt_output.result["results"]:
-         for event in result_to_events(
-             result,
-             docs_url=dbt_output.docs_url,
-             node_info_to_asset_key=node_info_to_asset_key,
-             manifest_json=manifest_json,
-         ):
-             yield check.inst(
-                 event,
-                 (AssetMaterialization, AssetObservation),
-             )
-
-
- def generate_materializations(
-     dbt_output: DbtOutput,
-     asset_key_prefix: Optional[Sequence[str]] = None,
- ) -> Iterator[AssetMaterialization]:
-     """This function yields :py:class:`dagster.AssetMaterialization` events for each model updated by
-     a dbt command.
-
-     Information parsed from a :py:class:`~DbtOutput` object.
-
-     Examples:
-         .. code-block:: python
-
-             from dagster import op, Output
-             from dagster_dbt.utils import generate_materializations
-             from dagster_dbt import dbt_cli_resource
-
-             @op(required_resource_keys={"dbt"})
-             def my_custom_dbt_run(context):
-                 dbt_output = context.resources.dbt.run()
-                 for materialization in generate_materializations(dbt_output):
-                     # you can modify the materialization object to add extra metadata, if desired
-                     yield materialization
-                 yield Output(my_dbt_output)
-
-             @job(resource_defs={{"dbt":dbt_cli_resource}})
-             def my_dbt_cli_job():
-                 my_custom_dbt_run()
-     """
-     asset_key_prefix = check.opt_sequence_param(asset_key_prefix, "asset_key_prefix", of_type=str)
+             unique_ids.add(msg.get("unique_id"))

-     for event in generate_events(
-         dbt_output,
-         node_info_to_asset_key=lambda info: AssetKey(
-             asset_key_prefix + info["unique_id"].split(".")
-         ),
-     ):
-         yield check.inst(event, AssetMaterialization)
+     return unique_ids - {None}


- def select_unique_ids_from_manifest(
-     select: str,
-     exclude: str,
-     manifest_json: Mapping[str, Any],
+ def _select_unique_ids_from_manifest(
+     select: str, exclude: str, selector: str, manifest_json: Mapping[str, Any]
  ) -> AbstractSet[str]:
      """Method to apply a selection string to an existing manifest.json file."""
      import dbt.graph.cli as graph_cli
      import dbt.graph.selector as graph_selector
      from dbt.contracts.graph.manifest import Manifest
+     from dbt.contracts.graph.nodes import SavedQuery, SemanticModel
+     from dbt.contracts.selection import SelectorFile
      from dbt.graph.selector_spec import IndirectSelection, SelectionSpec
      from networkx import DiGraph

+     select_specified = select and select != "fqn:*"
+     check.param_invariant(
+         not ((select_specified or exclude) and selector),
+         "selector",
+         "Cannot provide both a selector and a select/exclude param.",
+     )
+
      # NOTE: this was faster than calling `Manifest.from_dict`, so we are keeping this.
      class _DictShim(dict):
          """Shim to enable hydrating a dictionary into a dot-accessible object. We need this because
@@ -252,11 +119,24 @@ def select_unique_ids_from_manifest(
              # allow recursive access e.g. foo.bar.baz
              return _DictShim(ret) if isinstance(ret, dict) else ret

+     unit_tests = {}
+     if DBT_PYTHON_VERSION is not None and DBT_PYTHON_VERSION >= version.parse("1.8.0"):
+         from dbt.contracts.graph.nodes import UnitTestDefinition
+
+         unit_tests = (
+             {
+                 "unit_tests": {
+                     # Starting in dbt 1.8 unit test nodes must be defined using the UnitTestDefinition class
+                     unique_id: UnitTestDefinition.from_dict(info)
+                     for unique_id, info in manifest_json["unit_tests"].items()
+                 },
+             }
+             if manifest_json.get("unit_tests")
+             else {}
+         )
+
      manifest = Manifest(
-         nodes={
-             unique_id: _DictShim(info)
-             for unique_id, info in manifest_json["nodes"].items()  # type: ignore
-         },
+         nodes={unique_id: _DictShim(info) for unique_id, info in manifest_json["nodes"].items()},
          sources={
              unique_id: _DictShim(info)
              for unique_id, info in manifest_json["sources"].items()  # type: ignore
@@ -272,14 +152,38 @@ def select_unique_ids_from_manifest(
          **(  # type: ignore
              {
                  "semantic_models": {
+                     # Semantic model nodes must be defined using the SemanticModel class
+                     unique_id: SemanticModel.from_dict(info)
+                     for unique_id, info in manifest_json["semantic_models"].items()
+                 },
+             }
+             if manifest_json.get("semantic_models")
+             else {}
+         ),
+         **(
+             {
+                 "saved_queries": {
+                     # Saved query nodes must be defined using the SavedQuery class
+                     unique_id: SavedQuery.from_dict(info)
+                     for unique_id, info in manifest_json["saved_queries"].items()
+                 },
+             }
+             if manifest_json.get("saved_queries")
+             else {}
+         ),
+         **(
+             {
+                 "selectors": {
                      unique_id: _DictShim(info)
-                     for unique_id, info in manifest_json.get("semantic_models", {}).items()
+                     for unique_id, info in manifest_json["selectors"].items()
                  }
              }
-             if manifest_json.get("semantic_models")
+             if manifest_json.get("selectors")
              else {}
          ),
+         **unit_tests,
      )
+
      child_map = manifest_json["child_map"]

      graph = graph_selector.Graph(DiGraph(incoming_graph_data=child_map))
@@ -291,32 +195,30 @@ def select_unique_ids_from_manifest(
              "WARN_ERROR": True,
          }
      )
-     parsed_spec: SelectionSpec = graph_cli.parse_union([select], True)
+
+     if selector:
+         # must parse all selectors to handle dependencies, then grab the specific selector
+         # that was specified
+         result = graph_cli.parse_from_selectors_definition(
+             source=SelectorFile.from_dict({"selectors": manifest.selectors.values()})
+         )
+         if selector not in result:
+             raise ValueError(f"Selector `{selector}` not found in manifest.")
+         parsed_spec: SelectionSpec = cast("SelectionSpec", result[selector]["definition"])
+     else:
+         parsed_spec: SelectionSpec = graph_cli.parse_union([select], True)

      if exclude:
          parsed_exclude_spec = graph_cli.parse_union([exclude], False)
          parsed_spec = graph_cli.SelectionDifference(components=[parsed_spec, parsed_exclude_spec])

      # execute this selection against the graph
-     selector = graph_selector.NodeSelector(graph, manifest)
-     selected, _ = selector.select_nodes(parsed_spec)
+     node_selector = graph_selector.NodeSelector(graph, manifest)
+     selected, _ = node_selector.select_nodes(parsed_spec)
      return selected


- def get_dbt_resource_props_by_dbt_unique_id_from_manifest(
-     manifest: Mapping[str, Any],
- ) -> Mapping[str, Mapping[str, Any]]:
-     """A mapping of a dbt node's unique id to the node's dictionary representation in the manifest."""
-     return {
-         **manifest["nodes"],
-         **manifest["sources"],
-         **manifest["exposures"],
-         **manifest["metrics"],
-         **manifest.get("semantic_models", {}),
-     }
-
-
- def _set_flag_attrs(kvs: Dict[str, Any]):
+ def _set_flag_attrs(kvs: dict[str, Any]):
      from dbt.flags import get_flag_dict, set_flags

      new_flags = Namespace()
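
Note: the new public `select_unique_ids` helper shown above effectively replaces the removed `select_unique_ids_from_manifest`, adding named-selector support and an optional `DbtProject` for CLI-based selection. A minimal sketch of calling it against a pre-compiled manifest; the manifest path and selection string are hypothetical, and whether the helper is re-exported beyond `dagster_dbt.utils` is not shown in this diff:

import json

from dagster_dbt.utils import select_unique_ids

# Hypothetical: load a manifest previously produced by dbt for this project.
with open("target/manifest.json") as f:
    manifest = json.load(f)

# With dbt-core installed and project=None, the manifest-based branch above is used.
unique_ids = select_unique_ids(
    select="tag:daily",  # hypothetical dbt selection string
    exclude="",
    selector="",  # a named selector cannot be combined with select/exclude (see the check above)
    project=None,
    manifest_json=manifest,
)
print(sorted(unique_ids))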
dagster_dbt/version.py CHANGED
@@ -1 +1 @@
- __version__ = "0.23.3"
+ __version__ = "0.28.4"
dagster_dbt-0.28.4.dist-info/METADATA ADDED
@@ -0,0 +1,47 @@
+ Metadata-Version: 2.4
+ Name: dagster-dbt
+ Version: 0.28.4
+ Summary: A Dagster integration for dbt
+ Home-page: https://github.com/dagster-io/dagster/tree/master/python_modules/libraries/dagster-dbt
+ Author: Dagster Labs
+ Author-email: hello@dagsterlabs.com
+ License: Apache-2.0
+ Classifier: Programming Language :: Python :: 3.10
+ Classifier: Programming Language :: Python :: 3.11
+ Classifier: Programming Language :: Python :: 3.12
+ Classifier: Programming Language :: Python :: 3.13
+ Classifier: License :: OSI Approved :: Apache Software License
+ Classifier: Operating System :: OS Independent
+ Requires-Python: >=3.10,<3.14
+ License-File: LICENSE
+ Requires-Dist: dagster==1.12.4
+ Requires-Dist: dbt-core<1.11,>=1.7
+ Requires-Dist: gitpython
+ Requires-Dist: Jinja2
+ Requires-Dist: networkx
+ Requires-Dist: orjson
+ Requires-Dist: requests
+ Requires-Dist: rich
+ Requires-Dist: sqlglot[rs]<28.1.0
+ Requires-Dist: typer>=0.9.0
+ Requires-Dist: packaging
+ Provides-Extra: test-bare
+ Requires-Dist: pytest-rerunfailures; extra == "test-bare"
+ Requires-Dist: pytest-order; extra == "test-bare"
+ Provides-Extra: test
+ Requires-Dist: pytest-rerunfailures; extra == "test"
+ Requires-Dist: pytest-order; extra == "test"
+ Requires-Dist: dagster-duckdb; extra == "test"
+ Requires-Dist: dagster-duckdb-pandas; extra == "test"
+ Requires-Dist: dbt-duckdb<1.9.2; extra == "test"
+ Requires-Dist: duckdb<1.4.0; extra == "test"
+ Dynamic: author
+ Dynamic: author-email
+ Dynamic: classifier
+ Dynamic: home-page
+ Dynamic: license
+ Dynamic: license-file
+ Dynamic: provides-extra
+ Dynamic: requires-dist
+ Dynamic: requires-python
+ Dynamic: summary
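
For reference, the pins declared above can be read back from an installed environment with the standard library; a small sketch, assuming the 0.28.4 wheel is installed locally:

import importlib.metadata as md

# Reads the same METADATA file shown above from the installed distribution.
print(md.version("dagster-dbt"))  # expected: 0.28.4
core_pins = [r for r in md.requires("dagster-dbt") or [] if r.startswith(("dagster==", "dbt-core"))]
print(core_pins)  # should include dagster==1.12.4 and dbt-core<1.11,>=1.7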
dagster_dbt-0.28.4.dist-info/RECORD ADDED
@@ -0,0 +1,59 @@
+ dagster_dbt/__init__.py,sha256=rlPCxCzovXNDiqXBUEcgNVHjZrXQXwcX3kwUhe_69C4,4708
+ dagster_dbt/asset_decorator.py,sha256=OFFjhmDQO6dK8N3U7VQF7gergswkIS3eHnMRwAe2BZY,14892
+ dagster_dbt/asset_specs.py,sha256=2EdWIhY2QZhtGXM7N-kkkeK3ClnGgYX7ayRi_X11cLg,2741
+ dagster_dbt/asset_utils.py,sha256=HMy-_ifUG7vtsUU0lGaPum99Wn6NXEOjIfg4nW0sw7U,46657
+ dagster_dbt/compat.py,sha256=lqzGonzQE7Lb825yRqUaQPXeNQp8umAR4LqyD6COXdc,3609
+ dagster_dbt/dagster_dbt_translator.py,sha256=skUpl7NxG7ysDfDOs1pSq1c9-ciByN7LrGvO0ZsC4W4,30274
+ dagster_dbt/dbt_core_version.py,sha256=w1P62qDdbApXKv0XvUNr2p7FlwiW68csugsNaqmjNjM,38
+ dagster_dbt/dbt_manifest.py,sha256=q10Qq1whh-dLfWtFbTYXYbqBVCf0oU8T1yRPyy9ASw0,1307
+ dagster_dbt/dbt_manifest_asset_selection.py,sha256=KSEHcVdtfFZaEqSQDUsAx8H8BQe62jZxAOE_CLtwVlI,5072
+ dagster_dbt/dbt_project.py,sha256=HtUuNYbKMAPWqTOPgPeJK4u6b1nKyqHrRNB8sYwOWVw,12203
+ dagster_dbt/dbt_project_manager.py,sha256=d4Jq5D5x4ElUOGVG-enlhYr0GMLgC-8uL3FegVHNAE0,6047
+ dagster_dbt/dbt_version.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ dagster_dbt/errors.py,sha256=a8xag0tjh8OQVkiL10_uwY4hkAgRCH6HtTGaZxIbXZI,1073
+ dagster_dbt/freshness_builder.py,sha256=DHAC3AGWAwIA7psDf72R0t5p8NjxDytGU9g5xnca1hc,6395
+ dagster_dbt/metadata_set.py,sha256=lqjASYoYeM_Ey6r8UsPUkRMwmuAIfFCFvkNm0xW5xTg,512
+ dagster_dbt/py.typed,sha256=la67KBlbjXN-_-DfGNcdOcjYumVpKG_Tkw-8n5dnGB4,8
+ dagster_dbt/utils.py,sha256=gT6xO7buRolkhc2fa5ySUPfD1eXo3e6RJWZAKkM6yFo,8513
+ dagster_dbt/version.py,sha256=le8GUWDg2-I-aN9BEcWW_cGqOJzU27CUKxY6F59kr5k,23
+ dagster_dbt/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ dagster_dbt/cli/app.py,sha256=9oBx85uzAkMtBdV39vNm5rxuAGPVYzzDUs6Ek-KL0XY,13516
+ dagster_dbt/cloud/__init__.py,sha256=8WKaLuPl_pUG9Cv78GW782vrWQfqK8QtAWegkTxA9r4,441
+ dagster_dbt/cloud/asset_defs.py,sha256=4mdXd3tiGd9OPBBF6xvH81DZhcyGL9pAw3WH8dB7Qu8,29311
+ dagster_dbt/cloud/cli.py,sha256=VnKzBjn-BPpjn4nPZm5xSrboAKpRhlCa-4IxsN1ROCo,4525
+ dagster_dbt/cloud/ops.py,sha256=rsU4qPCRUUzeHRInZph7YEz_iynwu_KidO5vMAYHX5E,4615
+ dagster_dbt/cloud/resources.py,sha256=AWW3VTBS6zrSKeDjMJovbnu54sKena93FoZle6ZSSq8,31747
+ dagster_dbt/cloud/types.py,sha256=gsigGlKjR3gLePppiYX09S2vx-0PyPvH7AkzOphDSMA,2886
+ dagster_dbt/cloud/utils.py,sha256=NnPxHp3r2cpJ0Vi1Gt4eBmBlNbAhdwaR7GUZRkFsLRc,7158
+ dagster_dbt/cloud_v2/__init__.py,sha256=9NjPjqm8ytsKiogiW9cUpHgQFaht1q6g_3K7KSKtzsU,471
+ dagster_dbt/cloud_v2/asset_decorator.py,sha256=wHL77K2NXvnU0PDEsqyqpiRX1yp1cuQiit1oxRpNY3s,3141
+ dagster_dbt/cloud_v2/cli_invocation.py,sha256=EX2a0G7HIlOJBES-Lf3DIp0y1L1vGeiO3ms1a0F9tWc,2235
+ dagster_dbt/cloud_v2/client.py,sha256=V7UsepwHteJiEhQ8XT-lzFV-xDlspzVDgMJDxd6FLEg,16674
+ dagster_dbt/cloud_v2/resources.py,sha256=ENzllq2rJlD4Tb3_9JTsMVsoBjyKBs28543-L3lfxLU,16925
+ dagster_dbt/cloud_v2/run_handler.py,sha256=rLzCMmqX7H6Eos19e5BW8ZYwmfh6F0DSwZOpDtZl-NM,9301
+ dagster_dbt/cloud_v2/sensor_builder.py,sha256=8mAm-1ZFvoiVVdxSkSQbMxg18aQUY5kazur7nBVgotM,10446
+ dagster_dbt/cloud_v2/types.py,sha256=dI-NIguj582LwTTMXdY5r0U4-INDn9anNy-ciGuEc1s,4136
+ dagster_dbt/components/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ dagster_dbt/components/dbt_project/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ dagster_dbt/components/dbt_project/component.py,sha256=IGjNWN7FdyFRxenli-wCI56fMqGfYyhvpZfMH9-EZiE,20599
+ dagster_dbt/components/dbt_project/scaffolder.py,sha256=QVnHbDoGtCHXzEUYC9Z3PAvLuJ0A0EecpV2MsBPn7TQ,2922
+ dagster_dbt/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ dagster_dbt/core/dbt_cli_event.py,sha256=Yqr-UuzFF7jIcI4KRAmiAhGTX9Ay0tIMN2NaJegsrJQ,25203
+ dagster_dbt/core/dbt_cli_invocation.py,sha256=lJLoRvy630xUAEdnH76yl5PJ98-OsLx06l813XjzZOM,17505
+ dagster_dbt/core/dbt_event_iterator.py,sha256=IL3QsVUDMfGk04SFTkrVEfbbYu745Kn8OyMQlrlHxIE,16953
+ dagster_dbt/core/resource.py,sha256=1qIEneW2aZYeArF5w_QV4-Xmf7Kp9kpGaQaxUlL3ZZs,30851
+ dagster_dbt/core/utils.py,sha256=ciXjLhFTNVTyDtVWP4Kjee0LWZkIuVJOltvlv0COzDo,577
+ dagster_dbt/include/__init__.py,sha256=8ujr-ROlJ5x64POs-bH-0zfjZ2QHx-FgKUZAvAFbSs8,89
+ dagster_dbt/include/pyproject.toml.jinja,sha256=_BbC3zmV6ajtGnDMesyut0Yl0Fc7bY-F887KTIKMm1o,661
+ dagster_dbt/include/setup.py.jinja,sha256=N3NjYuXvWmaY8bj5GftVxVvImDUGC4YbFc_MdP1-quA,618
+ dagster_dbt/include/scaffold/__init__.py.jinja,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ dagster_dbt/include/scaffold/assets.py.jinja,sha256=JImqnDUP5ewy8RVti4IuXL70yJnoke54dMuZ3G8Wlx8,562
+ dagster_dbt/include/scaffold/definitions.py.jinja,sha256=Hou7emwkEeh5YXTdqjYFrAc2SK-Q6MgTNsQOKA_Vy3s,364
+ dagster_dbt/include/scaffold/project.py.jinja,sha256=YNtkT5Hq4VbGw-b7QcxdelhXsesIKORwVuBFGFdfeUs,432
+ dagster_dbt/include/scaffold/schedules.py.jinja,sha256=Xua_VtPjYFc498A5uaBGQ36GwV1gqciO4P3D8Yt9M-Y,413
+ dagster_dbt-0.28.4.dist-info/licenses/LICENSE,sha256=4lsMW-RCvfVD4_F57wrmpe3vX1xwUk_OAKKmV_XT7Z0,11348
+ dagster_dbt-0.28.4.dist-info/METADATA,sha256=yNSxqahe2ufGcj2pOBVNZXED1-AD-RQ51l4epQZkg9g,1579
+ dagster_dbt-0.28.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ dagster_dbt-0.28.4.dist-info/entry_points.txt,sha256=pbv0tVoTB7cByG-noE8rC6atvthh64qBaTo7PkQ9HbM,163
+ dagster_dbt-0.28.4.dist-info/top_level.txt,sha256=hoOwFvw9OpJUN1azE6UVHcxMKqhUwR_BTN0Ay-iKUDA,12
+ dagster_dbt-0.28.4.dist-info/RECORD,,
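
Each RECORD row above is `path,sha256=<digest>,size`, where the digest is the unpadded urlsafe-base64 SHA-256 of the file as specified by the wheel RECORD format; the repeated `47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0` value on zero-byte files is the digest of the empty string. A sketch of recomputing one entry (the install path below is hypothetical):

import base64
import hashlib
from pathlib import Path

def record_hash(path: Path) -> str:
    """Return the sha256 field as it would appear in RECORD for this file."""
    digest = hashlib.sha256(path.read_bytes()).digest()
    return "sha256=" + base64.urlsafe_b64encode(digest).rstrip(b"=").decode()

# Hypothetical install location; compare the output to the dagster_dbt/version.py row above.
print(record_hash(Path("site-packages/dagster_dbt/version.py")))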
{dagster_dbt-0.23.3.dist-info → dagster_dbt-0.28.4.dist-info}/WHEEL RENAMED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: bdist_wheel (0.41.2)
+ Generator: setuptools (80.9.0)
  Root-Is-Purelib: true
  Tag: py3-none-any

{dagster_dbt-0.23.3.dist-info → dagster_dbt-0.28.4.dist-info}/entry_points.txt RENAMED
@@ -1,3 +1,6 @@
  [console_scripts]
  dagster-dbt = dagster_dbt.cli.app:app
  dagster-dbt-cloud = dagster_dbt.cloud.cli:app
+
+ [dagster_dg_cli.registry_modules]
+ dagster_dbt = dagster_dbt
{dagster_dbt-0.23.3.dist-info → dagster_dbt-0.28.4.dist-info/licenses}/LICENSE RENAMED
@@ -186,7 +186,7 @@
        same "printed page" as the copyright notice for easier
        identification within third-party archives.

-    Copyright 2023 Dagster Labs, Inc".
+    Copyright 2025 Dagster Labs, Inc.

     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.