apache-airflow-providers-dbt-cloud 4.4.4rc1__py3-none-any.whl → 4.6.3__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.

airflow/providers/dbt/cloud/__init__.py

@@ -29,11 +29,11 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "4.4.4"
+__version__ = "4.6.3"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
-    "2.10.0"
+    "2.11.0"
 ):
     raise RuntimeError(
-        f"The package `apache-airflow-providers-dbt-cloud:{__version__}` needs Apache Airflow 2.10.0+"
+        f"The package `apache-airflow-providers-dbt-cloud:{__version__}` needs Apache Airflow 2.11.0+"
     )
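
The guard above means the 4.6.3 wheel refuses to import on Airflow older than 2.11.0. A minimal standalone illustration of the comparison it performs (the helper name is hypothetical, not part of the provider); note that base_version strips pre-release and dev suffixes before comparing:

    # Illustrative sketch of the version gate above, not provider code.
    import packaging.version


    def meets_min_airflow(airflow_version: str, minimum: str = "2.11.0") -> bool:
        # base_version drops pre-release/dev suffixes, e.g. "2.11.0.dev0" -> "2.11.0"
        base = packaging.version.parse(airflow_version).base_version
        return packaging.version.parse(base) >= packaging.version.parse(minimum)


    print(meets_min_airflow("2.10.5"))       # False -> the provider raises RuntimeError
    print(meets_min_airflow("2.11.0.dev0"))  # True  -> import succeeds
    print(meets_min_airflow("3.0.0"))        # True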

airflow/providers/dbt/cloud/hooks/dbt.py

@@ -34,7 +34,7 @@ from requests.auth import AuthBase
 from requests.sessions import Session
 from tenacity import AsyncRetrying, RetryCallState, retry_if_exception, stop_after_attempt, wait_exponential
 
-from airflow.exceptions import AirflowException
+from airflow.providers.common.compat.sdk import AirflowException
 from airflow.providers.http.hooks.http import HttpHook
 
 if TYPE_CHECKING:
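
The hook now sources AirflowException from the common.compat sdk shim instead of airflow.exceptions, so one import path works across the Airflow versions the provider supports. A hedged sketch of the equivalent fallback that code without the compat provider could write (illustrative pattern only, not the shim's actual implementation):

    # Prefer the compat shim when available, fall back to the classic location.
    try:
        from airflow.providers.common.compat.sdk import AirflowException
    except ImportError:  # environment without the compat sdk module
        from airflow.exceptions import AirflowException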

airflow/providers/dbt/cloud/operators/dbt.py

@@ -23,8 +23,7 @@ from functools import cached_property
 from pathlib import Path
 from typing import TYPE_CHECKING, Any
 
-from airflow.configuration import conf
-from airflow.providers.common.compat.sdk import BaseOperator, BaseOperatorLink, XCom
+from airflow.providers.common.compat.sdk import BaseOperator, BaseOperatorLink, XCom, conf
 from airflow.providers.dbt.cloud.hooks.dbt import (
     DbtCloudHook,
     DbtCloudJobRunException,
@@ -36,7 +35,7 @@ from airflow.providers.dbt.cloud.utils.openlineage import generate_openlineage_e
 
 if TYPE_CHECKING:
     from airflow.providers.openlineage.extractors import OperatorLineage
-    from airflow.utils.context import Context
+    from airflow.sdk import Context
 
 
 class DbtCloudRunJobOperatorLink(BaseOperatorLink):

airflow/providers/dbt/cloud/sensors/dbt.py

@@ -20,16 +20,14 @@ import time
 from functools import cached_property
 from typing import TYPE_CHECKING, Any
 
-from airflow.configuration import conf
-from airflow.exceptions import AirflowException
-from airflow.providers.common.compat.sdk import BaseSensorOperator
+from airflow.providers.common.compat.sdk import AirflowException, BaseSensorOperator, conf
 from airflow.providers.dbt.cloud.hooks.dbt import DbtCloudHook, DbtCloudJobRunException, DbtCloudJobRunStatus
 from airflow.providers.dbt.cloud.triggers.dbt import DbtCloudRunJobTrigger
 from airflow.providers.dbt.cloud.utils.openlineage import generate_openlineage_events_from_dbt_cloud_run
 
 if TYPE_CHECKING:
     from airflow.providers.openlineage.extractors import OperatorLineage
-    from airflow.utils.context import Context
+    from airflow.sdk import Context
 
 
 class DbtCloudJobRunSensor(BaseSensorOperator):
@@ -55,6 +53,7 @@ class DbtCloudJobRunSensor(BaseSensorOperator):
         run_id: int,
         account_id: int | None = None,
         deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
+        hook_params: dict[str, Any] | None = None,
         **kwargs,
     ) -> None:
         if deferrable:
@@ -68,13 +67,13 @@ class DbtCloudJobRunSensor(BaseSensorOperator):
         self.dbt_cloud_conn_id = dbt_cloud_conn_id
         self.run_id = run_id
         self.account_id = account_id
-
+        self.hook_params = hook_params or {}
         self.deferrable = deferrable
 
     @cached_property
     def hook(self):
         """Returns DBT Cloud hook."""
-        return DbtCloudHook(self.dbt_cloud_conn_id)
+        return DbtCloudHook(self.dbt_cloud_conn_id, **self.hook_params)
 
     def poke(self, context: Context) -> bool:
         job_run_status = self.hook.get_job_run_status(run_id=self.run_id, account_id=self.account_id)
@@ -110,6 +109,7 @@ class DbtCloudJobRunSensor(BaseSensorOperator):
                 account_id=self.account_id,
                 poll_interval=self.poke_interval,
                 end_time=end_time,
+                hook_params=self.hook_params,
             ),
             method_name="execute_complete",
         )
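
The new hook_params argument is stored on the sensor, forwarded to DbtCloudHook when the sensor pokes, and passed through to DbtCloudRunJobTrigger when the sensor defers. A minimal usage sketch (values are illustrative; which keys are meaningful depends on the keyword arguments DbtCloudHook accepts):

    from airflow.providers.dbt.cloud.sensors.dbt import DbtCloudJobRunSensor

    wait_for_dbt_run = DbtCloudJobRunSensor(
        task_id="wait_for_dbt_run",
        dbt_cloud_conn_id="dbt_cloud_default",  # default connection id
        run_id=12345,                           # illustrative dbt Cloud run id
        account_id=None,                        # fall back to the account on the connection
        deferrable=True,                        # hook_params also reach the trigger's hook
        hook_params={},                         # forwarded as DbtCloudHook(conn_id, **hook_params)
    )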

airflow/providers/dbt/cloud/utils/openlineage.py

@@ -56,7 +56,48 @@ def _get_dag_run_clear_number(task_instance):
     return task_instance.dag_run.clear_number
 
 
-@require_openlineage_version(provider_min_version="2.3.0")
+def _get_parent_run_metadata(task_instance):
+    """
+    Retrieve the ParentRunMetadata associated with a specific Airflow task instance.
+
+    This metadata helps link OpenLineage events of child jobs to the original Airflow task execution.
+    Establishing this connection enables better lineage tracking and observability.
+    """
+    from openlineage.common.provider.dbt import ParentRunMetadata
+
+    from airflow.providers.openlineage.plugins.macros import (
+        lineage_job_name,
+        lineage_job_namespace,
+        lineage_root_job_name,
+        lineage_root_run_id,
+        lineage_run_id,
+    )
+
+    parent_run_id = lineage_run_id(task_instance)
+    parent_job_name = lineage_job_name(task_instance)
+    parent_job_namespace = lineage_job_namespace()
+
+    root_parent_run_id = lineage_root_run_id(task_instance)
+    root_parent_job_name = lineage_root_job_name(task_instance)
+
+    try:  # Added in OL provider 2.9.0, try to use it if possible
+        from airflow.providers.openlineage.plugins.macros import lineage_root_job_namespace
+
+        root_parent_job_namespace = lineage_root_job_namespace(task_instance)
+    except ImportError:
+        root_parent_job_namespace = lineage_job_namespace()
+
+    return ParentRunMetadata(
+        run_id=parent_run_id,
+        job_name=parent_job_name,
+        job_namespace=parent_job_namespace,
+        root_parent_run_id=root_parent_run_id,
+        root_parent_job_name=root_parent_job_name,
+        root_parent_job_namespace=root_parent_job_namespace,
+    )
+
+
+@require_openlineage_version(provider_min_version="2.5.0")
 def generate_openlineage_events_from_dbt_cloud_run(
     operator: DbtCloudRunJobOperator | DbtCloudJobRunSensor, task_instance: TaskInstance
 ) -> OperatorLineage:
@@ -74,14 +115,10 @@ def generate_openlineage_events_from_dbt_cloud_run(
 
     :return: An empty OperatorLineage object indicating the completion of events generation.
     """
-    from openlineage.common.provider.dbt import DbtCloudArtifactProcessor, ParentRunMetadata
+    from openlineage.common.provider.dbt import DbtCloudArtifactProcessor
 
-    from airflow.providers.openlineage.conf import namespace
     from airflow.providers.openlineage.extractors import OperatorLineage
-    from airflow.providers.openlineage.plugins.adapter import (
-        _PRODUCER,
-        OpenLineageAdapter,
-    )
+    from airflow.providers.openlineage.plugins.adapter import _PRODUCER
     from airflow.providers.openlineage.plugins.listener import get_openlineage_listener
 
     # if no account_id set this will fallback
@@ -140,29 +177,7 @@ def generate_openlineage_events_from_dbt_cloud_run(
     )
 
     log.debug("Preparing OpenLineage parent job information to be included in DBT events.")
-    # generate same run id of current task instance
-    parent_run_id = OpenLineageAdapter.build_task_instance_run_id(
-        dag_id=task_instance.dag_id,
-        task_id=operator.task_id,
-        logical_date=_get_logical_date(task_instance),
-        try_number=task_instance.try_number,
-        map_index=task_instance.map_index,
-    )
-
-    root_parent_run_id = OpenLineageAdapter.build_dag_run_id(
-        dag_id=task_instance.dag_id,
-        logical_date=_get_logical_date(task_instance),
-        clear_number=_get_dag_run_clear_number(task_instance),
-    )
-
-    parent_job = ParentRunMetadata(
-        run_id=parent_run_id,
-        job_name=f"{task_instance.dag_id}.{task_instance.task_id}",
-        job_namespace=namespace(),
-        root_parent_run_id=root_parent_run_id,
-        root_parent_job_name=task_instance.dag_id,
-        root_parent_job_namespace=namespace(),
-    )
+    parent_metadata = _get_parent_run_metadata(task_instance)
     adapter = get_openlineage_listener().adapter
 
     # process each step in loop, sending generated events in the same order as steps
@@ -178,7 +193,7 @@ def generate_openlineage_events_from_dbt_cloud_run(
 
         processor = DbtCloudArtifactProcessor(
             producer=_PRODUCER,
-            job_namespace=namespace(),
+            job_namespace=parent_metadata.job_namespace,
             skip_errors=False,
             logger=operator.log,
             manifest=manifest,
@@ -187,7 +202,7 @@ def generate_openlineage_events_from_dbt_cloud_run(
             catalog=catalog,
         )
 
-        processor.dbt_run_metadata = parent_job
+        processor.dbt_run_metadata = parent_metadata
 
         events = processor.parse().events()
         log.debug("Found %s OpenLineage events for artifact no. %s.", len(events), counter)

apache_airflow_providers_dbt_cloud-4.4.4rc1.dist-info/METADATA → apache_airflow_providers_dbt_cloud-4.6.3.dist-info/METADATA

@@ -1,12 +1,13 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-dbt-cloud
-Version: 4.4.4rc1
+Version: 4.6.3
 Summary: Provider package apache-airflow-providers-dbt-cloud for Apache Airflow
 Keywords: airflow-provider,dbt.cloud,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
 Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
 Requires-Python: >=3.10
 Description-Content-Type: text/x-rst
+License-Expression: Apache-2.0
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Environment :: Console
 Classifier: Environment :: Web Environment
@@ -14,22 +15,23 @@ Classifier: Intended Audience :: Developers
 Classifier: Intended Audience :: System Administrators
 Classifier: Framework :: Apache Airflow
 Classifier: Framework :: Apache Airflow :: Provider
-Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.10.0rc1
-Requires-Dist: apache-airflow-providers-common-compat>=1.8.0rc1
+License-File: LICENSE
+License-File: NOTICE
+Requires-Dist: apache-airflow>=2.11.0
+Requires-Dist: apache-airflow-providers-common-compat>=1.12.0
 Requires-Dist: apache-airflow-providers-http
 Requires-Dist: asgiref>=2.3.0
 Requires-Dist: aiohttp>=3.9.2
 Requires-Dist: tenacity>=8.3.0
-Requires-Dist: apache-airflow-providers-openlineage>=2.3.0rc1 ; extra == "openlineage"
+Requires-Dist: apache-airflow-providers-openlineage>=2.3.0 ; extra == "openlineage"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.4/changelog.html
-Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.4
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.6.3/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.6.3
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -61,7 +63,7 @@ Provides-Extra: openlineage
 
 Package ``apache-airflow-providers-dbt-cloud``
 
-Release: ``4.4.4``
+Release: ``4.6.3``
 
 
 `dbt Cloud <https://www.getdbt.com/product/dbt-cloud/>`__
@@ -74,7 +76,7 @@ This is a provider package for ``dbt.cloud`` provider. All classes for this prov
 are in ``airflow.providers.dbt.cloud`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.4/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.6.3/>`_.
 
 Installation
 ------------
@@ -91,8 +93,8 @@ Requirements
 ==========================================  ==================
 PIP package                                 Version required
 ==========================================  ==================
-``apache-airflow``                          ``>=2.10.0``
-``apache-airflow-providers-common-compat``  ``>=1.8.0``
+``apache-airflow``                          ``>=2.11.0``
+``apache-airflow-providers-common-compat``  ``>=1.10.1``
 ``apache-airflow-providers-http``
 ``asgiref``                                 ``>=2.3.0``
 ``aiohttp``                                 ``>=3.9.2``
@@ -130,5 +132,5 @@ Extra Dependencies
 ===============  ===============================================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.4/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.6.3/changelog.html>`_.
 

@@ -0,0 +1,19 @@
+airflow/providers/dbt/cloud/__init__.py,sha256=vuHyroquzQsGN_LgJ93kDuuQp5YJX300V1lYlL7rj4Y,1498
+airflow/providers/dbt/cloud/get_provider_info.py,sha256=ufODYanp90_NPW1UftzGIwwG65Olb5l5PfSp0Mo1SOs,2507
+airflow/providers/dbt/cloud/version_compat.py,sha256=RQbdCueLOaFZWekpQmF0BoAoJInW8EoyvJ3Ah-HbrPo,1577
+airflow/providers/dbt/cloud/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/dbt/cloud/hooks/dbt.py,sha256=EmnLW_WpAVgq6PK7JvyHAxLaTh-Pc0zD8atDmwd_iR8,38471
+airflow/providers/dbt/cloud/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/dbt/cloud/operators/dbt.py,sha256=jQorRwZoFFhbJEXyECFC9lpxs4s0OcAQ1-Eao6iVDXQ,18346
+airflow/providers/dbt/cloud/sensors/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
+airflow/providers/dbt/cloud/sensors/dbt.py,sha256=dwIq6pHBlwQWFMGOyUUjOYd_PJLDmQcNC9ng5j-pUOM,5268
+airflow/providers/dbt/cloud/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/dbt/cloud/triggers/dbt.py,sha256=p6-LU2GPpEZ2DtL4ZTkG7OIVAFCblKYbjV9z49ooltI,4952
+airflow/providers/dbt/cloud/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/dbt/cloud/utils/openlineage.py,sha256=tg1yqGWgwqGDYpe79CSGSdLE-_5ADubtWVxrjqm1De8,8635
+apache_airflow_providers_dbt_cloud-4.6.3.dist-info/entry_points.txt,sha256=c18L1WEEK18WQeEGrm9kMVqutiYJHiWGH5jU1JqnToE,105
+apache_airflow_providers_dbt_cloud-4.6.3.dist-info/licenses/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
+apache_airflow_providers_dbt_cloud-4.6.3.dist-info/licenses/NOTICE,sha256=_cWHznIoUSbLCY_KfmKqetlKlsoH0c2VBjmZjElAzuc,168
+apache_airflow_providers_dbt_cloud-4.6.3.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
+apache_airflow_providers_dbt_cloud-4.6.3.dist-info/METADATA,sha256=2QcqbQw8mo6KmqaccXLPe7f3qTskR_-gGg8j3Q4MKN8,6198
+apache_airflow_providers_dbt_cloud-4.6.3.dist-info/RECORD,,

@@ -0,0 +1,5 @@
+Apache Airflow
+Copyright 2016-2026 The Apache Software Foundation
+
+This product includes software developed at
+The Apache Software Foundation (http://www.apache.org/).

@@ -1,18 +0,0 @@
-airflow/providers/dbt/cloud/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
-airflow/providers/dbt/cloud/__init__.py,sha256=W4cZ3hUYJLy1BNH2L-IsRndS3FN2pW2xA5z-n-BRQmY,1498
-airflow/providers/dbt/cloud/get_provider_info.py,sha256=ufODYanp90_NPW1UftzGIwwG65Olb5l5PfSp0Mo1SOs,2507
-airflow/providers/dbt/cloud/version_compat.py,sha256=RQbdCueLOaFZWekpQmF0BoAoJInW8EoyvJ3Ah-HbrPo,1577
-airflow/providers/dbt/cloud/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/dbt/cloud/hooks/dbt.py,sha256=uIzeZ5RlpubgNWNiQCcb57R2uTeTqW3R4rlD3ftFdSQ,38454
-airflow/providers/dbt/cloud/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/dbt/cloud/operators/dbt.py,sha256=YAdnm_FT6cq9iprIrSmpAzgd_O240VJqNs51zExSak4,18389
-airflow/providers/dbt/cloud/sensors/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
-airflow/providers/dbt/cloud/sensors/dbt.py,sha256=NigUphQQPjIy4ySsrFL8xJHVfgoOWKoyhRFMspuf6_4,5172
-airflow/providers/dbt/cloud/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/dbt/cloud/triggers/dbt.py,sha256=p6-LU2GPpEZ2DtL4ZTkG7OIVAFCblKYbjV9z49ooltI,4952
-airflow/providers/dbt/cloud/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/dbt/cloud/utils/openlineage.py,sha256=uUgIF6GyVMLY2G1g6TP3ai2C22avap5nESDJ5TeJMFw,8042
-apache_airflow_providers_dbt_cloud-4.4.4rc1.dist-info/entry_points.txt,sha256=c18L1WEEK18WQeEGrm9kMVqutiYJHiWGH5jU1JqnToE,105
-apache_airflow_providers_dbt_cloud-4.4.4rc1.dist-info/WHEEL,sha256=G2gURzTEtmeR8nrdXUJfNiB3VYVxigPQ-bEQujpNiNs,82
-apache_airflow_providers_dbt_cloud-4.4.4rc1.dist-info/METADATA,sha256=n4yy9MqBpKl-VRLt2_xbI4VUtfKLGCq9faXmhD2VFhA,6211
-apache_airflow_providers_dbt_cloud-4.4.4rc1.dist-info/RECORD,,