apache-airflow-providers-dbt-cloud 4.5.0__tar.gz → 4.6.2rc1__tar.gz

This diff compares the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions exactly as they appear in their respective public registries.
Files changed (55)
  1. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/PKG-INFO +11 -11
  2. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/README.rst +5 -5
  3. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/docs/changelog.rst +48 -3
  4. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/docs/index.rst +7 -7
  5. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/provider.yaml +4 -1
  6. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/pyproject.toml +6 -6
  7. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/src/airflow/providers/dbt/cloud/__init__.py +3 -3
  8. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/src/airflow/providers/dbt/cloud/hooks/dbt.py +1 -1
  9. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/src/airflow/providers/dbt/cloud/operators/dbt.py +1 -1
  10. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/src/airflow/providers/dbt/cloud/sensors/dbt.py +2 -3
  11. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/src/airflow/providers/dbt/cloud/utils/openlineage.py +47 -32
  12. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/tests/unit/dbt/cloud/hooks/test_dbt.py +1 -1
  13. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/tests/unit/dbt/cloud/operators/test_dbt.py +5 -4
  14. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/tests/unit/dbt/cloud/sensors/test_dbt.py +1 -4
  15. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/tests/unit/dbt/cloud/utils/test_openlineage.py +37 -6
  16. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/LICENSE +0 -0
  17. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/NOTICE +0 -0
  18. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/docs/.latest-doc-only-change.txt +0 -0
  19. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/docs/commits.rst +0 -0
  20. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/docs/conf.py +0 -0
  21. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/docs/connections.rst +0 -0
  22. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/docs/installing-providers-from-sources.rst +0 -0
  23. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/docs/integration-logos/dbt.png +0 -0
  24. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/docs/operators.rst +0 -0
  25. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/docs/security.rst +0 -0
  26. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/src/airflow/__init__.py +0 -0
  27. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/src/airflow/providers/__init__.py +0 -0
  28. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/src/airflow/providers/dbt/__init__.py +0 -0
  29. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/src/airflow/providers/dbt/cloud/get_provider_info.py +0 -0
  30. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/src/airflow/providers/dbt/cloud/hooks/__init__.py +0 -0
  31. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/src/airflow/providers/dbt/cloud/operators/__init__.py +0 -0
  32. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/src/airflow/providers/dbt/cloud/sensors/__init__.py +0 -0
  33. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/src/airflow/providers/dbt/cloud/triggers/__init__.py +0 -0
  34. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/src/airflow/providers/dbt/cloud/triggers/dbt.py +0 -0
  35. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/src/airflow/providers/dbt/cloud/utils/__init__.py +0 -0
  36. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/src/airflow/providers/dbt/cloud/version_compat.py +0 -0
  37. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/tests/conftest.py +0 -0
  38. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/tests/system/__init__.py +0 -0
  39. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/tests/system/dbt/__init__.py +0 -0
  40. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/tests/system/dbt/cloud/__init__.py +0 -0
  41. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/tests/system/dbt/cloud/example_dbt_cloud.py +0 -0
  42. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/tests/unit/__init__.py +0 -0
  43. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/tests/unit/dbt/__init__.py +0 -0
  44. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/tests/unit/dbt/cloud/__init__.py +0 -0
  45. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/tests/unit/dbt/cloud/hooks/__init__.py +0 -0
  46. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/tests/unit/dbt/cloud/operators/__init__.py +0 -0
  47. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/tests/unit/dbt/cloud/sensors/__init__.py +0 -0
  48. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/tests/unit/dbt/cloud/test_data/__init__.py +0 -0
  49. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/tests/unit/dbt/cloud/test_data/catalog.json +0 -0
  50. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/tests/unit/dbt/cloud/test_data/job_run.json +0 -0
  51. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/tests/unit/dbt/cloud/test_data/manifest.json +0 -0
  52. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/tests/unit/dbt/cloud/test_data/run_results.json +0 -0
  53. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/tests/unit/dbt/cloud/triggers/__init__.py +0 -0
  54. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/tests/unit/dbt/cloud/triggers/test_dbt.py +0 -0
  55. {apache_airflow_providers_dbt_cloud-4.5.0 → apache_airflow_providers_dbt_cloud-4.6.2rc1}/tests/unit/dbt/cloud/utils/__init__.py +0 -0
--- apache_airflow_providers_dbt_cloud-4.5.0/PKG-INFO
+++ apache_airflow_providers_dbt_cloud-4.6.2rc1/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-dbt-cloud
-Version: 4.5.0
+Version: 4.6.2rc1
 Summary: Provider package apache-airflow-providers-dbt-cloud for Apache Airflow
 Keywords: airflow-provider,dbt.cloud,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -22,16 +22,16 @@ Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: System :: Monitoring
 License-File: LICENSE
 License-File: NOTICE
-Requires-Dist: apache-airflow>=2.10.0
-Requires-Dist: apache-airflow-providers-common-compat>=1.8.0
+Requires-Dist: apache-airflow>=2.11.0rc1
+Requires-Dist: apache-airflow-providers-common-compat>=1.10.1rc1
 Requires-Dist: apache-airflow-providers-http
 Requires-Dist: asgiref>=2.3.0
 Requires-Dist: aiohttp>=3.9.2
 Requires-Dist: tenacity>=8.3.0
-Requires-Dist: apache-airflow-providers-openlineage>=2.3.0 ; extra == "openlineage"
+Requires-Dist: apache-airflow-providers-openlineage>=2.3.0rc1 ; extra == "openlineage"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.5.0/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.5.0
+Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-dbt-cloud/4.6.2/changelog.html
+Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-dbt-cloud/4.6.2
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -63,7 +63,7 @@ Provides-Extra: openlineage
 
 Package ``apache-airflow-providers-dbt-cloud``
 
-Release: ``4.5.0``
+Release: ``4.6.2``
 
 
 `dbt Cloud <https://www.getdbt.com/product/dbt-cloud/>`__
@@ -76,7 +76,7 @@ This is a provider package for ``dbt.cloud`` provider. All classes for this prov
 are in ``airflow.providers.dbt.cloud`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.5.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.6.2/>`_.
 
 Installation
 ------------
@@ -93,8 +93,8 @@ Requirements
 ========================================== ==================
 PIP package Version required
 ========================================== ==================
-``apache-airflow`` ``>=2.10.0``
-``apache-airflow-providers-common-compat`` ``>=1.8.0``
+``apache-airflow`` ``>=2.11.0``
+``apache-airflow-providers-common-compat`` ``>=1.10.1``
 ``apache-airflow-providers-http``
 ``asgiref`` ``>=2.3.0``
 ``aiohttp`` ``>=3.9.2``
@@ -132,5 +132,5 @@ Extra Dependencies
 =============== ===============================================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.5.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.6.2/changelog.html>`_.
 
--- apache_airflow_providers_dbt_cloud-4.5.0/README.rst
+++ apache_airflow_providers_dbt_cloud-4.6.2rc1/README.rst
@@ -23,7 +23,7 @@
 
 Package ``apache-airflow-providers-dbt-cloud``
 
-Release: ``4.5.0``
+Release: ``4.6.2``
 
 
 `dbt Cloud <https://www.getdbt.com/product/dbt-cloud/>`__
@@ -36,7 +36,7 @@ This is a provider package for ``dbt.cloud`` provider. All classes for this prov
 are in ``airflow.providers.dbt.cloud`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.5.0/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.6.2/>`_.
 
 Installation
 ------------
@@ -53,8 +53,8 @@ Requirements
 ========================================== ==================
 PIP package Version required
 ========================================== ==================
-``apache-airflow`` ``>=2.10.0``
-``apache-airflow-providers-common-compat`` ``>=1.8.0``
+``apache-airflow`` ``>=2.11.0``
+``apache-airflow-providers-common-compat`` ``>=1.10.1``
 ``apache-airflow-providers-http``
 ``asgiref`` ``>=2.3.0``
 ``aiohttp`` ``>=3.9.2``
@@ -92,4 +92,4 @@ Extra Dependencies
 =============== ===============================================
 
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.5.0/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.6.2/changelog.html>`_.
--- apache_airflow_providers_dbt_cloud-4.5.0/docs/changelog.rst
+++ apache_airflow_providers_dbt_cloud-4.6.2rc1/docs/changelog.rst
@@ -28,6 +28,52 @@
 Changelog
 ---------
 
+4.6.2
+.....
+
+Misc
+~~~~
+
+* ``Remove top-level SDK reference in Core (#59817)``
+
+.. Below changes are excluded from the changelog. Move them to
+   appropriate section above if needed. Do not delete the lines(!):
+   * ``TaskInstance unused method cleanup (#59835)``
+
+4.6.1
+.....
+
+Misc
+~~~~
+
+* ``chore: use OL macros instead of building OL ids from scratch (#59197)``
+* ``Add backcompat for exceptions in providers (#58727)``
+
+Doc-only
+~~~~~~~~
+
+* ``CHG: fix address (#59193)``
+
+.. Below changes are excluded from the changelog. Move them to
+   appropriate section above if needed. Do not delete the lines(!):
+
+4.6.0
+.....
+
+.. note::
+   This release of provider is only available for Airflow 2.11+ as explained in the
+   `Apache Airflow providers support policy <https://github.com/apache/airflow/blob/main/PROVIDERS.rst#minimum-supported-version-of-airflow-for-community-managed-providers>`_.
+
+Misc
+~~~~
+
+* ``Bump minimum Airflow version in providers to Airflow 2.11.0 (#58612)``
+
+.. Below changes are excluded from the changelog. Move them to
+   appropriate section above if needed. Do not delete the lines(!):
+   * ``Updates to release process of providers (#58316)``
+   * ``Bump min version of openlineage libraries to 1.40.0 to fix compat issues (#58302)``
+
 4.5.0
 .....
 
@@ -86,8 +132,6 @@ Doc-only
 .. Below changes are excluded from the changelog. Move them to
    appropriate section above if needed. Do not delete the lines(!):
    * ``Switch pre-commit to prek (#54258)``
-
-.. Review and move the new changes to one of the sections above:
    * ``Fix Airflow 2 reference in README/index of providers (#55240)``
 
 4.4.2
@@ -548,7 +592,8 @@ Misc
 * ``Remove some useless try/except from providers code (#33967)``
 * ``Use a single statement with multiple contexts instead of nested statements in providers (#33768)``
 
-.. Review and move the new changes to one of the sections above:
+.. Below changes are excluded from the changelog. Move them to
+   appropriate section above if needed. Do not delete the lines(!):
    * ``Prepare docs for 09 2023 - 1st wave of Providers (#34201)``
 
 3.2.3
--- apache_airflow_providers_dbt_cloud-4.5.0/docs/index.rst
+++ apache_airflow_providers_dbt_cloud-4.6.2rc1/docs/index.rst
@@ -60,7 +60,7 @@ an Integrated Developer Environment (IDE).
     :maxdepth: 1
     :caption: Resources
 
-    Example Dags <https://github.com/apache/airflow/tree/providers-dbt-cloud/|version|/providers/dbt/tests/system/dbt/cloud/example_dbt_cloud.py>
+    Example Dags <https://github.com/apache/airflow/tree/providers-dbt-cloud/|version|/providers/dbt/cloud/tests/system/dbt/cloud/example_dbt_cloud.py>
     PyPI Repository <https://pypi.org/project/apache-airflow-providers-dbt-cloud/>
     Installing from sources <installing-providers-from-sources>
 
@@ -81,7 +81,7 @@ apache-airflow-providers-dbt-cloud package
 `dbt Cloud <https://www.getdbt.com/product/dbt-cloud/>`__
 
 
-Release: 4.5.0
+Release: 4.6.2
 
 Provider package
 ----------------
@@ -99,13 +99,13 @@ For the minimum Airflow version supported, see ``Requirements`` below.
 Requirements
 ------------
 
-The minimum Apache Airflow version supported by this provider distribution is ``2.10.0``.
+The minimum Apache Airflow version supported by this provider distribution is ``2.11.0``.
 
 ========================================== ==================
 PIP package Version required
 ========================================== ==================
-``apache-airflow`` ``>=2.10.0``
-``apache-airflow-providers-common-compat`` ``>=1.8.0``
+``apache-airflow`` ``>=2.11.0``
+``apache-airflow-providers-common-compat`` ``>=1.10.1``
 ``apache-airflow-providers-http``
 ``asgiref`` ``>=2.3.0``
 ``aiohttp`` ``>=3.9.2``
@@ -139,5 +139,5 @@ Downloading official packages
 You can download officially released packages and verify their checksums and signatures from the
 `Official Apache Download site <https://downloads.apache.org/airflow/providers/>`_
 
-* `The apache-airflow-providers-dbt-cloud 4.5.0 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.5.0.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.5.0.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.5.0.tar.gz.sha512>`__)
-* `The apache-airflow-providers-dbt-cloud 4.5.0 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.5.0-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.5.0-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.5.0-py3-none-any.whl.sha512>`__)
+* `The apache-airflow-providers-dbt-cloud 4.6.2 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.6.2.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.6.2.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.6.2.tar.gz.sha512>`__)
+* `The apache-airflow-providers-dbt-cloud 4.6.2 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.6.2-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.6.2-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.6.2-py3-none-any.whl.sha512>`__)
--- apache_airflow_providers_dbt_cloud-4.5.0/provider.yaml
+++ apache_airflow_providers_dbt_cloud-4.6.2rc1/provider.yaml
@@ -22,12 +22,15 @@ description: |
   `dbt Cloud <https://www.getdbt.com/product/dbt-cloud/>`__
 
 state: ready
-source-date-epoch: 1763069126
+source-date-epoch: 1767124372
 # Note that those versions are maintained by release manager - do not update them manually
 # with the exception of case where other provider in sources has >= new provider version.
 # In such case adding >= NEW_VERSION and bumping to NEW_VERSION in a provider have
 # to be done in the same PR
 versions:
+  - 4.6.2
+  - 4.6.1
+  - 4.6.0
   - 4.5.0
   - 4.4.4
   - 4.4.3
--- apache_airflow_providers_dbt_cloud-4.5.0/pyproject.toml
+++ apache_airflow_providers_dbt_cloud-4.6.2rc1/pyproject.toml
@@ -25,7 +25,7 @@ build-backend = "flit_core.buildapi"
 
 [project]
 name = "apache-airflow-providers-dbt-cloud"
-version = "4.5.0"
+version = "4.6.2rc1"
 description = "Provider package apache-airflow-providers-dbt-cloud for Apache Airflow"
 readme = "README.rst"
 license = "Apache-2.0"
@@ -58,8 +58,8 @@ requires-python = ">=3.10"
 # Make sure to run ``prek update-providers-dependencies --all-files``
 # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
 dependencies = [
-    "apache-airflow>=2.10.0",
-    "apache-airflow-providers-common-compat>=1.8.0",
+    "apache-airflow>=2.11.0rc1",
+    "apache-airflow-providers-common-compat>=1.10.1rc1",
     "apache-airflow-providers-http",
     "asgiref>=2.3.0",
     "aiohttp>=3.9.2",
@@ -71,7 +71,7 @@ dependencies = [
 [project.optional-dependencies]
 # pip install apache-airflow-providers-dbt-cloud[openlineage]
 "openlineage" = [
-    "apache-airflow-providers-openlineage>=2.3.0",
+    "apache-airflow-providers-openlineage>=2.3.0rc1",
 ]
 
 [dependency-groups]
@@ -111,8 +111,8 @@ apache-airflow-providers-common-sql = {workspace = true}
 apache-airflow-providers-standard = {workspace = true}
 
 [project.urls]
-"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.5.0"
-"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.5.0/changelog.html"
+"Documentation" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-dbt-cloud/4.6.2"
+"Changelog" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-dbt-cloud/4.6.2/changelog.html"
 "Bug Tracker" = "https://github.com/apache/airflow/issues"
 "Source Code" = "https://github.com/apache/airflow"
 "Slack Chat" = "https://s.apache.org/airflow-slack"
--- apache_airflow_providers_dbt_cloud-4.5.0/src/airflow/providers/dbt/cloud/__init__.py
+++ apache_airflow_providers_dbt_cloud-4.6.2rc1/src/airflow/providers/dbt/cloud/__init__.py
@@ -29,11 +29,11 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "4.5.0"
+__version__ = "4.6.2"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
-    "2.10.0"
+    "2.11.0"
 ):
     raise RuntimeError(
-        f"The package `apache-airflow-providers-dbt-cloud:{__version__}` needs Apache Airflow 2.10.0+"
+        f"The package `apache-airflow-providers-dbt-cloud:{__version__}` needs Apache Airflow 2.11.0+"
     )
--- apache_airflow_providers_dbt_cloud-4.5.0/src/airflow/providers/dbt/cloud/hooks/dbt.py
+++ apache_airflow_providers_dbt_cloud-4.6.2rc1/src/airflow/providers/dbt/cloud/hooks/dbt.py
@@ -34,7 +34,7 @@ from requests.auth import AuthBase
 from requests.sessions import Session
 from tenacity import AsyncRetrying, RetryCallState, retry_if_exception, stop_after_attempt, wait_exponential
 
-from airflow.exceptions import AirflowException
+from airflow.providers.common.compat.sdk import AirflowException
 from airflow.providers.http.hooks.http import HttpHook
 
 if TYPE_CHECKING:
--- apache_airflow_providers_dbt_cloud-4.5.0/src/airflow/providers/dbt/cloud/operators/dbt.py
+++ apache_airflow_providers_dbt_cloud-4.6.2rc1/src/airflow/providers/dbt/cloud/operators/dbt.py
@@ -36,7 +36,7 @@ from airflow.providers.dbt.cloud.utils.openlineage import generate_openlineage_e
 
 if TYPE_CHECKING:
     from airflow.providers.openlineage.extractors import OperatorLineage
-    from airflow.utils.context import Context
+    from airflow.sdk import Context
 
 
 class DbtCloudRunJobOperatorLink(BaseOperatorLink):
--- apache_airflow_providers_dbt_cloud-4.5.0/src/airflow/providers/dbt/cloud/sensors/dbt.py
+++ apache_airflow_providers_dbt_cloud-4.6.2rc1/src/airflow/providers/dbt/cloud/sensors/dbt.py
@@ -21,15 +21,14 @@ from functools import cached_property
 from typing import TYPE_CHECKING, Any
 
 from airflow.configuration import conf
-from airflow.exceptions import AirflowException
-from airflow.providers.common.compat.sdk import BaseSensorOperator
+from airflow.providers.common.compat.sdk import AirflowException, BaseSensorOperator
 from airflow.providers.dbt.cloud.hooks.dbt import DbtCloudHook, DbtCloudJobRunException, DbtCloudJobRunStatus
 from airflow.providers.dbt.cloud.triggers.dbt import DbtCloudRunJobTrigger
 from airflow.providers.dbt.cloud.utils.openlineage import generate_openlineage_events_from_dbt_cloud_run
 
 if TYPE_CHECKING:
     from airflow.providers.openlineage.extractors import OperatorLineage
-    from airflow.utils.context import Context
+    from airflow.sdk import Context
 
 
 class DbtCloudJobRunSensor(BaseSensorOperator):
--- apache_airflow_providers_dbt_cloud-4.5.0/src/airflow/providers/dbt/cloud/utils/openlineage.py
+++ apache_airflow_providers_dbt_cloud-4.6.2rc1/src/airflow/providers/dbt/cloud/utils/openlineage.py
@@ -56,7 +56,48 @@ def _get_dag_run_clear_number(task_instance):
     return task_instance.dag_run.clear_number
 
 
-@require_openlineage_version(provider_min_version="2.3.0")
+def _get_parent_run_metadata(task_instance):
+    """
+    Retrieve the ParentRunMetadata associated with a specific Airflow task instance.
+
+    This metadata helps link OpenLineage events of child jobs to the original Airflow task execution.
+    Establishing this connection enables better lineage tracking and observability.
+    """
+    from openlineage.common.provider.dbt import ParentRunMetadata
+
+    from airflow.providers.openlineage.plugins.macros import (
+        lineage_job_name,
+        lineage_job_namespace,
+        lineage_root_job_name,
+        lineage_root_run_id,
+        lineage_run_id,
+    )
+
+    parent_run_id = lineage_run_id(task_instance)
+    parent_job_name = lineage_job_name(task_instance)
+    parent_job_namespace = lineage_job_namespace()
+
+    root_parent_run_id = lineage_root_run_id(task_instance)
+    rot_parent_job_name = lineage_root_job_name(task_instance)
+
+    try:  # Added in OL provider 2.9.0, try to use it if possible
+        from airflow.providers.openlineage.plugins.macros import lineage_root_job_namespace
+
+        root_parent_job_namespace = lineage_root_job_namespace(task_instance)
+    except ImportError:
+        root_parent_job_namespace = lineage_job_namespace()
+
+    return ParentRunMetadata(
+        run_id=parent_run_id,
+        job_name=parent_job_name,
+        job_namespace=parent_job_namespace,
+        root_parent_run_id=root_parent_run_id,
+        root_parent_job_name=rot_parent_job_name,
+        root_parent_job_namespace=root_parent_job_namespace,
+    )
+
+
+@require_openlineage_version(provider_min_version="2.5.0")
 def generate_openlineage_events_from_dbt_cloud_run(
     operator: DbtCloudRunJobOperator | DbtCloudJobRunSensor, task_instance: TaskInstance
 ) -> OperatorLineage:
@@ -74,14 +115,10 @@ def generate_openlineage_events_from_dbt_cloud_run(
 
     :return: An empty OperatorLineage object indicating the completion of events generation.
     """
-    from openlineage.common.provider.dbt import DbtCloudArtifactProcessor, ParentRunMetadata
+    from openlineage.common.provider.dbt import DbtCloudArtifactProcessor
 
-    from airflow.providers.openlineage.conf import namespace
     from airflow.providers.openlineage.extractors import OperatorLineage
-    from airflow.providers.openlineage.plugins.adapter import (
-        _PRODUCER,
-        OpenLineageAdapter,
-    )
+    from airflow.providers.openlineage.plugins.adapter import _PRODUCER
     from airflow.providers.openlineage.plugins.listener import get_openlineage_listener
 
     # if no account_id set this will fallback
@@ -140,29 +177,7 @@
     )
 
     log.debug("Preparing OpenLineage parent job information to be included in DBT events.")
-    # generate same run id of current task instance
-    parent_run_id = OpenLineageAdapter.build_task_instance_run_id(
-        dag_id=task_instance.dag_id,
-        task_id=operator.task_id,
-        logical_date=_get_logical_date(task_instance),
-        try_number=task_instance.try_number,
-        map_index=task_instance.map_index,
-    )
-
-    root_parent_run_id = OpenLineageAdapter.build_dag_run_id(
-        dag_id=task_instance.dag_id,
-        logical_date=_get_logical_date(task_instance),
-        clear_number=_get_dag_run_clear_number(task_instance),
-    )
-
-    parent_job = ParentRunMetadata(
-        run_id=parent_run_id,
-        job_name=f"{task_instance.dag_id}.{task_instance.task_id}",
-        job_namespace=namespace(),
-        root_parent_run_id=root_parent_run_id,
-        root_parent_job_name=task_instance.dag_id,
-        root_parent_job_namespace=namespace(),
-    )
+    parent_metadata = _get_parent_run_metadata(task_instance)
     adapter = get_openlineage_listener().adapter
 
     # process each step in loop, sending generated events in the same order as steps
@@ -178,7 +193,7 @@
 
         processor = DbtCloudArtifactProcessor(
             producer=_PRODUCER,
-            job_namespace=namespace(),
+            job_namespace=parent_metadata.job_namespace,
             skip_errors=False,
             logger=operator.log,
             manifest=manifest,
@@ -187,7 +202,7 @@
             catalog=catalog,
         )
 
-        processor.dbt_run_metadata = parent_job
+        processor.dbt_run_metadata = parent_metadata
 
         events = processor.parse().events()
         log.debug("Found %s OpenLineage events for artifact no. %s.", len(events), counter)
--- apache_airflow_providers_dbt_cloud-4.5.0/tests/unit/dbt/cloud/hooks/test_dbt.py
+++ apache_airflow_providers_dbt_cloud-4.6.2rc1/tests/unit/dbt/cloud/hooks/test_dbt.py
@@ -27,8 +27,8 @@ import pytest
 from requests import exceptions as requests_exceptions
 from requests.models import Response
 
-from airflow.exceptions import AirflowException
 from airflow.models.connection import Connection
+from airflow.providers.common.compat.sdk import AirflowException
 from airflow.providers.dbt.cloud.hooks.dbt import (
     DBT_CAUSE_MAX_LENGTH,
     DbtCloudHook,
--- apache_airflow_providers_dbt_cloud-4.5.0/tests/unit/dbt/cloud/operators/test_dbt.py
+++ apache_airflow_providers_dbt_cloud-4.6.2rc1/tests/unit/dbt/cloud/operators/test_dbt.py
@@ -22,9 +22,8 @@ from unittest.mock import MagicMock, patch
 
 import pytest
 
-from airflow.exceptions import TaskDeferred
 from airflow.models import DAG, Connection
-from airflow.providers.common.compat.sdk import timezone
+from airflow.providers.common.compat.sdk import TaskDeferred, timezone
 from airflow.providers.dbt.cloud.hooks.dbt import DbtCloudHook, DbtCloudJobRunException, DbtCloudJobRunStatus
 from airflow.providers.dbt.cloud.operators.dbt import (
     DbtCloudGetJobRunArtifactOperator,
@@ -648,7 +647,7 @@ class TestDbtCloudRunJobOperator:
     )
     @pytest.mark.db_test
    def test_run_job_operator_link(
-        self, conn_id, account_id, create_task_instance_of_operator, request, mock_supervisor_comms
+        self, conn_id, account_id, dag_maker, create_task_instance_of_operator, request, mock_supervisor_comms
     ):
         ti = create_task_instance_of_operator(
             DbtCloudRunJobOperator,
@@ -675,7 +674,9 @@
                 run_id=_run_response["data"]["id"],
             ),
         )
-        url = ti.task.operator_extra_links[0].get_link(operator=ti.task, ti_key=ti.key)
+
+        task = dag_maker.dag.get_task(ti.task_id)
+        url = task.operator_extra_links[0].get_link(operator=ti.task, ti_key=ti.key)
 
         assert url == (
             EXPECTED_JOB_RUN_OP_EXTRA_LINK.format(
--- apache_airflow_providers_dbt_cloud-4.5.0/tests/unit/dbt/cloud/sensors/test_dbt.py
+++ apache_airflow_providers_dbt_cloud-4.6.2rc1/tests/unit/dbt/cloud/sensors/test_dbt.py
@@ -21,11 +21,8 @@ from unittest.mock import patch
 
 import pytest
 
-from airflow.exceptions import (
-    AirflowException,
-    TaskDeferred,
-)
 from airflow.models.connection import Connection
+from airflow.providers.common.compat.sdk import AirflowException, TaskDeferred
 from airflow.providers.dbt.cloud.hooks.dbt import DbtCloudHook, DbtCloudJobRunException, DbtCloudJobRunStatus
 from airflow.providers.dbt.cloud.sensors.dbt import DbtCloudJobRunSensor
 from airflow.providers.dbt.cloud.triggers.dbt import DbtCloudRunJobTrigger
--- apache_airflow_providers_dbt_cloud-4.5.0/tests/unit/dbt/cloud/utils/test_openlineage.py
+++ apache_airflow_providers_dbt_cloud-4.6.2rc1/tests/unit/dbt/cloud/utils/test_openlineage.py
@@ -21,14 +21,20 @@ from pathlib import Path
 from unittest.mock import MagicMock, patch
 
 import pytest
-from openlineage.common import __version__
+from openlineage.client.constants import __version__
 from packaging.version import parse
 
 from airflow.exceptions import AirflowOptionalProviderFeatureException
 from airflow.providers.dbt.cloud.hooks.dbt import DbtCloudHook
 from airflow.providers.dbt.cloud.operators.dbt import DbtCloudRunJobOperator
-from airflow.providers.dbt.cloud.utils.openlineage import generate_openlineage_events_from_dbt_cloud_run
+from airflow.providers.dbt.cloud.utils.openlineage import (
+    _get_parent_run_metadata,
+    generate_openlineage_events_from_dbt_cloud_run,
+)
+from airflow.providers.openlineage.conf import namespace
 from airflow.providers.openlineage.extractors import OperatorLineage
+from airflow.utils import timezone
+from airflow.utils.state import TaskInstanceState
 
 TASK_ID = "dbt_test"
 DAG_ID = "dbt_dag"
@@ -94,12 +100,13 @@ def get_dbt_artifact(*args, **kwargs):
     [
         ("1.99.0", True),
         ("2.0.0", True),
-        ("2.3.0", False),
+        ("2.3.0", True),
+        ("2.5.0", False),
         ("2.99.0", False),
     ],
 )
 def test_previous_version_openlineage_provider(value, is_error):
-    """When using OpenLineage, the dbt-cloud provider now depends on openlineage provider >= 2.3"""
+    """When using OpenLineage, the dbt-cloud provider now depends on openlineage provider >= 2.4"""
 
     def _mock_version(package):
         if package == "apache-airflow-providers-openlineage":
@@ -110,7 +117,7 @@ def test_previous_version_openlineage_provider(value, is_error):
 
     mock_task_instance = MagicMock()
 
     expected_err = (
-        f"OpenLineage provider version `{value}` is lower than required `2.3.0`, "
+        f"OpenLineage provider version `{value}` is lower than required `2.5.0`, "
         "skipping function `generate_openlineage_events_from_dbt_cloud_run` execution"
     )
@@ -126,8 +133,32 @@ def test_previous_version_openlineage_provider(value, is_error):
            generate_openlineage_events_from_dbt_cloud_run(mock_operator, mock_task_instance)
 
 
+def test_get_parent_run_metadata():
+    logical_date = timezone.datetime(2025, 1, 1)
+    dr = MagicMock(logical_date=logical_date, clear_number=0)
+    mock_ti = MagicMock(
+        dag_id="dag_id",
+        task_id="task_id",
+        map_index=1,
+        try_number=1,
+        logical_date=logical_date,
+        state=TaskInstanceState.SUCCESS,
+        dag_run=dr,
+    )
+    mock_ti.get_template_context.return_value = {"dag_run": dr}
+
+    result = _get_parent_run_metadata(mock_ti)
+
+    assert result.run_id == "01941f29-7c00-7087-8906-40e512c257bd"
+    assert result.job_namespace == namespace()
+    assert result.job_name == "dag_id.task_id"
+    assert result.root_parent_run_id == "01941f29-7c00-743e-b109-28b18d0a19c5"
+    assert result.root_parent_job_namespace == namespace()
+    assert result.root_parent_job_name == "dag_id"
+
+
 class TestGenerateOpenLineageEventsFromDbtCloudRun:
-    @patch("importlib.metadata.version", return_value="2.3.0")
+    @patch("importlib.metadata.version", return_value="3.0.0")
     @patch("airflow.providers.openlineage.plugins.listener.get_openlineage_listener")
     @patch("airflow.providers.openlineage.plugins.adapter.OpenLineageAdapter.build_task_instance_run_id")
     @patch("airflow.providers.openlineage.plugins.adapter.OpenLineageAdapter.build_dag_run_id")