apache-airflow-providers-dbt-cloud 4.4.2rc1__tar.gz → 4.5.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. apache_airflow_providers_dbt_cloud-4.5.0/NOTICE +5 -0
  2. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/PKG-INFO +25 -13
  3. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/README.rst +15 -6
  4. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/docs/changelog.rst +62 -0
  5. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/docs/index.rst +7 -8
  6. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/docs/operators.rst +2 -2
  7. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/provider.yaml +4 -1
  8. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/pyproject.toml +10 -8
  9. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/src/airflow/providers/dbt/cloud/__init__.py +1 -1
  10. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/src/airflow/providers/dbt/cloud/hooks/dbt.py +106 -14
  11. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/src/airflow/providers/dbt/cloud/operators/dbt.py +17 -10
  12. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/src/airflow/providers/dbt/cloud/sensors/dbt.py +5 -8
  13. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/src/airflow/providers/dbt/cloud/triggers/dbt.py +5 -1
  14. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/src/airflow/providers/dbt/cloud/version_compat.py +0 -12
  15. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/unit/dbt/cloud/hooks/test_dbt.py +281 -36
  16. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/unit/dbt/cloud/operators/test_dbt.py +17 -17
  17. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/unit/dbt/cloud/sensors/test_dbt.py +3 -1
  18. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/unit/dbt/cloud/triggers/test_dbt.py +8 -6
  19. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/unit/dbt/cloud/utils/test_openlineage.py +2 -1
  20. {apache_airflow_providers_dbt_cloud-4.4.2rc1/src/airflow/providers/dbt/cloud → apache_airflow_providers_dbt_cloud-4.5.0}/LICENSE +0 -0
  21. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/docs/.latest-doc-only-change.txt +0 -0
  22. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/docs/commits.rst +0 -0
  23. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/docs/conf.py +0 -0
  24. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/docs/connections.rst +0 -0
  25. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/docs/installing-providers-from-sources.rst +0 -0
  26. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/docs/integration-logos/dbt.png +0 -0
  27. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/docs/security.rst +0 -0
  28. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/src/airflow/__init__.py +0 -0
  29. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/src/airflow/providers/__init__.py +0 -0
  30. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/src/airflow/providers/dbt/__init__.py +0 -0
  31. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/src/airflow/providers/dbt/cloud/get_provider_info.py +0 -0
  32. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/src/airflow/providers/dbt/cloud/hooks/__init__.py +0 -0
  33. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/src/airflow/providers/dbt/cloud/operators/__init__.py +0 -0
  34. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/src/airflow/providers/dbt/cloud/sensors/__init__.py +0 -0
  35. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/src/airflow/providers/dbt/cloud/triggers/__init__.py +0 -0
  36. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/src/airflow/providers/dbt/cloud/utils/__init__.py +0 -0
  37. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/src/airflow/providers/dbt/cloud/utils/openlineage.py +0 -0
  38. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/conftest.py +0 -0
  39. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/system/__init__.py +0 -0
  40. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/system/dbt/__init__.py +0 -0
  41. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/system/dbt/cloud/__init__.py +0 -0
  42. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/system/dbt/cloud/example_dbt_cloud.py +0 -0
  43. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/unit/__init__.py +0 -0
  44. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/unit/dbt/__init__.py +0 -0
  45. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/unit/dbt/cloud/__init__.py +0 -0
  46. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/unit/dbt/cloud/hooks/__init__.py +0 -0
  47. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/unit/dbt/cloud/operators/__init__.py +0 -0
  48. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/unit/dbt/cloud/sensors/__init__.py +0 -0
  49. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/unit/dbt/cloud/test_data/__init__.py +0 -0
  50. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/unit/dbt/cloud/test_data/catalog.json +0 -0
  51. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/unit/dbt/cloud/test_data/job_run.json +0 -0
  52. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/unit/dbt/cloud/test_data/manifest.json +0 -0
  53. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/unit/dbt/cloud/test_data/run_results.json +0 -0
  54. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/unit/dbt/cloud/triggers/__init__.py +0 -0
  55. {apache_airflow_providers_dbt_cloud-4.4.2rc1 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/unit/dbt/cloud/utils/__init__.py +0 -0
@@ -0,0 +1,5 @@
+ Apache Airflow
+ Copyright 2016-2025 The Apache Software Foundation
+
+ This product includes software developed at
+ The Apache Software Foundation (http://www.apache.org/).
@@ -1,12 +1,13 @@
  Metadata-Version: 2.4
  Name: apache-airflow-providers-dbt-cloud
- Version: 4.4.2rc1
+ Version: 4.5.0
  Summary: Provider package apache-airflow-providers-dbt-cloud for Apache Airflow
  Keywords: airflow-provider,dbt.cloud,airflow,integration
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
  Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
  Requires-Python: >=3.10
  Description-Content-Type: text/x-rst
+ License-Expression: Apache-2.0
  Classifier: Development Status :: 5 - Production/Stable
  Classifier: Environment :: Console
  Classifier: Environment :: Web Environment
@@ -14,21 +15,23 @@ Classifier: Intended Audience :: Developers
  Classifier: Intended Audience :: System Administrators
  Classifier: Framework :: Apache Airflow
  Classifier: Framework :: Apache Airflow :: Provider
- Classifier: License :: OSI Approved :: Apache Software License
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Classifier: Programming Language :: Python :: 3.13
  Classifier: Topic :: System :: Monitoring
- Requires-Dist: apache-airflow>=2.10.0rc1
- Requires-Dist: apache-airflow-providers-common-compat>=1.6.0rc1
+ License-File: LICENSE
+ License-File: NOTICE
+ Requires-Dist: apache-airflow>=2.10.0
+ Requires-Dist: apache-airflow-providers-common-compat>=1.8.0
  Requires-Dist: apache-airflow-providers-http
  Requires-Dist: asgiref>=2.3.0
  Requires-Dist: aiohttp>=3.9.2
- Requires-Dist: apache-airflow-providers-openlineage>=2.3.0rc1 ; extra == "openlineage"
+ Requires-Dist: tenacity>=8.3.0
+ Requires-Dist: apache-airflow-providers-openlineage>=2.3.0 ; extra == "openlineage"
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
- Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.2/changelog.html
- Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.2
+ Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.5.0/changelog.html
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.5.0
  Project-URL: Mastodon, https://fosstodon.org/@airflow
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
  Project-URL: Source Code, https://github.com/apache/airflow
@@ -60,9 +63,8 @@ Provides-Extra: openlineage

  Package ``apache-airflow-providers-dbt-cloud``

- Release: ``4.4.2``
+ Release: ``4.5.0``

- Release Date: ``|PypiReleaseDate|``

  `dbt Cloud <https://www.getdbt.com/product/dbt-cloud/>`__

@@ -74,12 +76,12 @@ This is a provider package for ``dbt.cloud`` provider. All classes for this prov
  are in ``airflow.providers.dbt.cloud`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.2/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.5.0/>`_.

  Installation
  ------------

- You can install this package on top of an existing Airflow 2 installation (see ``Requirements`` below
+ You can install this package on top of an existing Airflow installation (see ``Requirements`` below
  for the minimum Airflow version supported) via
  ``pip install apache-airflow-providers-dbt-cloud``

@@ -92,10 +94,11 @@ Requirements
  PIP package Version required
  ========================================== ==================
  ``apache-airflow`` ``>=2.10.0``
- ``apache-airflow-providers-common-compat`` ``>=1.6.0``
+ ``apache-airflow-providers-common-compat`` ``>=1.8.0``
  ``apache-airflow-providers-http``
  ``asgiref`` ``>=2.3.0``
  ``aiohttp`` ``>=3.9.2``
+ ``tenacity`` ``>=8.3.0``
  ========================================== ==================

  Cross provider package dependencies
@@ -119,6 +122,15 @@ Dependent package
  `apache-airflow-providers-openlineage <https://airflow.apache.org/docs/apache-airflow-providers-openlineage>`_ ``openlineage``
  ================================================================================================================== =================

+ Optional dependencies
+ ----------------------
+
+ =============== ===============================================
+ Extra Dependencies
+ =============== ===============================================
+ ``openlineage`` ``apache-airflow-providers-openlineage>=2.3.0``
+ =============== ===============================================
+
  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.2/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.5.0/changelog.html>`_.

@@ -23,9 +23,8 @@

  Package ``apache-airflow-providers-dbt-cloud``

- Release: ``4.4.2``
+ Release: ``4.5.0``

- Release Date: ``|PypiReleaseDate|``

  `dbt Cloud <https://www.getdbt.com/product/dbt-cloud/>`__

@@ -37,12 +36,12 @@ This is a provider package for ``dbt.cloud`` provider. All classes for this prov
  are in ``airflow.providers.dbt.cloud`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.2/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.5.0/>`_.

  Installation
  ------------

- You can install this package on top of an existing Airflow 2 installation (see ``Requirements`` below
+ You can install this package on top of an existing Airflow installation (see ``Requirements`` below
  for the minimum Airflow version supported) via
  ``pip install apache-airflow-providers-dbt-cloud``

@@ -55,10 +54,11 @@ Requirements
  PIP package Version required
  ========================================== ==================
  ``apache-airflow`` ``>=2.10.0``
- ``apache-airflow-providers-common-compat`` ``>=1.6.0``
+ ``apache-airflow-providers-common-compat`` ``>=1.8.0``
  ``apache-airflow-providers-http``
  ``asgiref`` ``>=2.3.0``
  ``aiohttp`` ``>=3.9.2``
+ ``tenacity`` ``>=8.3.0``
  ========================================== ==================

  Cross provider package dependencies
@@ -82,5 +82,14 @@ Dependent package
  `apache-airflow-providers-openlineage <https://airflow.apache.org/docs/apache-airflow-providers-openlineage>`_ ``openlineage``
  ================================================================================================================== =================

+ Optional dependencies
+ ----------------------
+
+ =============== ===============================================
+ Extra Dependencies
+ =============== ===============================================
+ ``openlineage`` ``apache-airflow-providers-openlineage>=2.3.0``
+ =============== ===============================================
+
  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.2/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.5.0/changelog.html>`_.
@@ -28,6 +28,68 @@
  Changelog
  ---------

+ 4.5.0
+ .....
+
+ Features
+ ~~~~~~~~
+
+ * ``Fixes inconsistency where other dbt operators already supported hook_params (#57242)``
+
+ Misc
+ ~~~~
+
+ * ``Convert all airflow distributions to be compliant with ASF requirements (#58138)``
+
+ .. Below changes are excluded from the changelog. Move them to
+ appropriate section above if needed. Do not delete the lines(!):
+ * ``Delete all unnecessary LICENSE Files (#58191)``
+ * ``Enable PT006 rule to 14 files in providers (databricks,dbt,docker) (#57994)``
+
+ 4.4.4
+ .....
+
+ Bug Fixes
+ ~~~~~~~~~
+
+ * ``Add retry mechanism and error handling to DBT Hook (#56651)``
+
+ Misc
+ ~~~~
+
+ * ``Migrate dbt.cloud provider to ''common.compat'' (#56999)``
+
+ Doc-only
+ ~~~~~~~~
+
+ * ``Remove placeholder Release Date in changelog and index files (#56056)``
+
+ .. Below changes are excluded from the changelog. Move them to
+ appropriate section above if needed. Do not delete the lines(!):
+ * ``Enable PT011 rule to prvoider tests (#56320)``
+ * ``nit: Bump required OL client for Openlineage provider (#56302)``
+
+ 4.4.3
+ .....
+
+
+ Bug Fixes
+ ~~~~~~~~~
+
+ * ``Change operator DbtCloudRunJobOperator to send job_run_id to XCom (#55184)``
+
+ Doc-only
+ ~~~~~~~~
+
+ * ``Make term Dag consistent in providers docs (#55101)``
+
+ .. Below changes are excluded from the changelog. Move them to
+ appropriate section above if needed. Do not delete the lines(!):
+ * ``Switch pre-commit to prek (#54258)``
+
+ .. Review and move the new changes to one of the sections above:
+ * ``Fix Airflow 2 reference in README/index of providers (#55240)``
+
  4.4.2
  .....

@@ -60,7 +60,7 @@ an Integrated Developer Environment (IDE).
  :maxdepth: 1
  :caption: Resources

- Example DAGs <https://github.com/apache/airflow/tree/providers-dbt-cloud/|version|/providers/dbt/tests/system/dbt/cloud/example_dbt_cloud.py>
+ Example Dags <https://github.com/apache/airflow/tree/providers-dbt-cloud/|version|/providers/dbt/tests/system/dbt/cloud/example_dbt_cloud.py>
  PyPI Repository <https://pypi.org/project/apache-airflow-providers-dbt-cloud/>
  Installing from sources <installing-providers-from-sources>

@@ -81,9 +81,7 @@ apache-airflow-providers-dbt-cloud package
  `dbt Cloud <https://www.getdbt.com/product/dbt-cloud/>`__


- Release: 4.4.2
-
- Release Date: ``|PypiReleaseDate|``
+ Release: 4.5.0

  Provider package
  ----------------
@@ -94,7 +92,7 @@ All classes for this package are included in the ``airflow.providers.dbt.cloud``
  Installation
  ------------

- You can install this package on top of an existing Airflow 2 installation via
+ You can install this package on top of an existing Airflow installation via
  ``pip install apache-airflow-providers-dbt-cloud``.
  For the minimum Airflow version supported, see ``Requirements`` below.

@@ -107,10 +105,11 @@ The minimum Apache Airflow version supported by this provider distribution is ``
  PIP package Version required
  ========================================== ==================
  ``apache-airflow`` ``>=2.10.0``
- ``apache-airflow-providers-common-compat`` ``>=1.6.0``
+ ``apache-airflow-providers-common-compat`` ``>=1.8.0``
  ``apache-airflow-providers-http``
  ``asgiref`` ``>=2.3.0``
  ``aiohttp`` ``>=3.9.2``
+ ``tenacity`` ``>=8.3.0``
  ========================================== ==================

  Cross provider package dependencies
@@ -140,5 +139,5 @@ Downloading official packages
  You can download officially released packages and verify their checksums and signatures from the
  `Official Apache Download site <https://downloads.apache.org/airflow/providers/>`_

- * `The apache-airflow-providers-dbt-cloud 4.4.2 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.2.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.2.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.2.tar.gz.sha512>`__)
- * `The apache-airflow-providers-dbt-cloud 4.4.2 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.2-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.2-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.2-py3-none-any.whl.sha512>`__)
+ * `The apache-airflow-providers-dbt-cloud 4.5.0 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.5.0.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.5.0.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.5.0.tar.gz.sha512>`__)
+ * `The apache-airflow-providers-dbt-cloud 4.5.0 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.5.0-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.5.0-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.5.0-py3-none-any.whl.sha512>`__)
@@ -65,7 +65,7 @@ configurations or overrides for the job run such as ``threads_override``, ``gene

  The below examples demonstrate how to instantiate DbtCloudRunJobOperator tasks with both synchronous and
  asynchronous waiting for run termination, respectively. To note, the ``account_id`` for the operators is
- referenced within the ``default_args`` of the example DAG.
+ referenced within the ``default_args`` of the example Dag.

  .. exampleinclude:: /../tests/system/dbt/cloud/example_dbt_cloud.py
  :language: python
@@ -104,7 +104,7 @@ functionality available with the :class:`~airflow.sensors.base.BaseSensorOperato

  In the example below, the ``run_id`` value in the example below comes from the output of a previous
  DbtCloudRunJobOperator task by utilizing the ``.output`` property exposed for all operators. Also, to note,
- the ``account_id`` for the task is referenced within the ``default_args`` of the example DAG.
+ the ``account_id`` for the task is referenced within the ``default_args`` of the example Dag.

  .. exampleinclude:: /../tests/system/dbt/cloud/example_dbt_cloud.py
  :language: python
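
For orientation, a minimal sketch (not taken from the bundled ``example_dbt_cloud.py``) of the pattern these docs describe: ``account_id`` and ``dbt_cloud_conn_id`` supplied through ``default_args``, the run ID handed to the sensor via ``.output``, and ``hook_params`` forwarded to the hook as mentioned in this release's changelog (#57242). All IDs and the ``hook_params`` keys below are illustrative assumptions::

    from __future__ import annotations

    from datetime import datetime

    from airflow import DAG
    from airflow.providers.dbt.cloud.operators.dbt import DbtCloudRunJobOperator
    from airflow.providers.dbt.cloud.sensors.dbt import DbtCloudJobRunSensor

    with DAG(
        dag_id="example_dbt_cloud_sketch",
        start_date=datetime(2025, 1, 1),
        schedule=None,
        default_args={"dbt_cloud_conn_id": "dbt", "account_id": 12345},  # illustrative values
    ) as dag:
        # Trigger the job without waiting; a sensor polls for completion instead.
        trigger_job = DbtCloudRunJobOperator(
            task_id="trigger_dbt_cloud_job",
            job_id=48617,  # illustrative job ID
            wait_for_termination=False,
            hook_params={"retry_limit": 3, "retry_delay": 2.0},  # assumed keys, mirroring the new hook arguments
        )

        # The run ID is pulled from the operator's XCom via the .output property.
        wait_for_job = DbtCloudJobRunSensor(
            task_id="wait_for_dbt_cloud_job",
            run_id=trigger_job.output,
            timeout=3600,
        )

        trigger_job >> wait_for_job
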
@@ -22,12 +22,15 @@ description: |
  `dbt Cloud <https://www.getdbt.com/product/dbt-cloud/>`__

  state: ready
- source-date-epoch: 1753690386
+ source-date-epoch: 1763069126
  # Note that those versions are maintained by release manager - do not update them manually
  # with the exception of case where other provider in sources has >= new provider version.
  # In such case adding >= NEW_VERSION and bumping to NEW_VERSION in a provider have
  # to be done in the same PR
  versions:
+ - 4.5.0
+ - 4.4.4
+ - 4.4.3
  - 4.4.2
  - 4.4.1
  - 4.4.0
@@ -25,9 +25,11 @@ build-backend = "flit_core.buildapi"

  [project]
  name = "apache-airflow-providers-dbt-cloud"
- version = "4.4.2rc1"
+ version = "4.5.0"
  description = "Provider package apache-airflow-providers-dbt-cloud for Apache Airflow"
  readme = "README.rst"
+ license = "Apache-2.0"
+ license-files = ['LICENSE', 'NOTICE']
  authors = [
  {name="Apache Software Foundation", email="dev@airflow.apache.org"},
  ]
@@ -43,7 +45,6 @@ classifiers = [
  "Intended Audience :: System Administrators",
  "Framework :: Apache Airflow",
  "Framework :: Apache Airflow :: Provider",
- "License :: OSI Approved :: Apache Software License",
  "Programming Language :: Python :: 3.10",
  "Programming Language :: Python :: 3.11",
  "Programming Language :: Python :: 3.12",
@@ -54,14 +55,15 @@ requires-python = ">=3.10"

  # The dependencies should be modified in place in the generated file.
  # Any change in the dependencies is preserved when the file is regenerated
- # Make sure to run ``breeze static-checks --type update-providers-dependencies --all-files``
+ # Make sure to run ``prek update-providers-dependencies --all-files``
  # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
  dependencies = [
- "apache-airflow>=2.10.0rc1",
- "apache-airflow-providers-common-compat>=1.6.0rc1",
+ "apache-airflow>=2.10.0",
+ "apache-airflow-providers-common-compat>=1.8.0",
  "apache-airflow-providers-http",
  "asgiref>=2.3.0",
  "aiohttp>=3.9.2",
+ "tenacity>=8.3.0",
  ]

  # The optional dependencies should be modified in place in the generated file
@@ -69,7 +71,7 @@ dependencies = [
  [project.optional-dependencies]
  # pip install apache-airflow-providers-dbt-cloud[openlineage]
  "openlineage" = [
- "apache-airflow-providers-openlineage>=2.3.0rc1",
+ "apache-airflow-providers-openlineage>=2.3.0",
  ]

  [dependency-groups]
@@ -109,8 +111,8 @@ apache-airflow-providers-common-sql = {workspace = true}
  apache-airflow-providers-standard = {workspace = true}

  [project.urls]
- "Documentation" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.2"
- "Changelog" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.2/changelog.html"
+ "Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.5.0"
+ "Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.5.0/changelog.html"
  "Bug Tracker" = "https://github.com/apache/airflow/issues"
  "Source Code" = "https://github.com/apache/airflow"
  "Slack Chat" = "https://s.apache.org/airflow-slack"
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version

  __all__ = ["__version__"]

- __version__ = "4.4.2"
+ __version__ = "4.5.0"

  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
  "2.10.0"
@@ -17,6 +17,7 @@
  from __future__ import annotations

  import asyncio
+ import copy
  import json
  import time
  import warnings
@@ -28,8 +29,10 @@ from typing import TYPE_CHECKING, Any, TypedDict, TypeVar, cast

  import aiohttp
  from asgiref.sync import sync_to_async
+ from requests import exceptions as requests_exceptions
  from requests.auth import AuthBase
  from requests.sessions import Session
+ from tenacity import AsyncRetrying, RetryCallState, retry_if_exception, stop_after_attempt, wait_exponential

  from airflow.exceptions import AirflowException
  from airflow.providers.http.hooks.http import HttpHook
@@ -174,6 +177,10 @@ class DbtCloudHook(HttpHook):
  Interact with dbt Cloud using the V2 (V3 if supported) API.

  :param dbt_cloud_conn_id: The ID of the :ref:`dbt Cloud connection <howto/connection:dbt-cloud>`.
+ :param timeout_seconds: Optional. The timeout in seconds for HTTP requests. If not provided, no timeout is applied.
+ :param retry_limit: The number of times to retry a request in case of failure.
+ :param retry_delay: The delay in seconds between retries.
+ :param retry_args: A dictionary of arguments to pass to the `tenacity.retry` decorator.
  """

  conn_name_attr = "dbt_cloud_conn_id"
@@ -193,9 +200,39 @@ class DbtCloudHook(HttpHook):
  },
  }

- def __init__(self, dbt_cloud_conn_id: str = default_conn_name, *args, **kwargs) -> None:
+ def __init__(
+ self,
+ dbt_cloud_conn_id: str = default_conn_name,
+ timeout_seconds: int | None = None,
+ retry_limit: int = 1,
+ retry_delay: float = 1.0,
+ retry_args: dict[Any, Any] | None = None,
+ ) -> None:
  super().__init__(auth_type=TokenAuth)
  self.dbt_cloud_conn_id = dbt_cloud_conn_id
+ self.timeout_seconds = timeout_seconds
+ if retry_limit < 1:
+ raise ValueError("Retry limit must be greater than or equal to 1")
+ self.retry_limit = retry_limit
+ self.retry_delay = retry_delay
+
+ def retry_after_func(retry_state: RetryCallState) -> None:
+ error_msg = str(retry_state.outcome.exception()) if retry_state.outcome else "Unknown error"
+ self._log_request_error(retry_state.attempt_number, error_msg)
+
+ if retry_args:
+ self.retry_args = copy.copy(retry_args)
+ self.retry_args["retry"] = retry_if_exception(self._retryable_error)
+ self.retry_args["after"] = retry_after_func
+ self.retry_args["reraise"] = True
+ else:
+ self.retry_args = {
+ "stop": stop_after_attempt(self.retry_limit),
+ "wait": wait_exponential(min=self.retry_delay, max=(2**retry_limit)),
+ "retry": retry_if_exception(self._retryable_error),
+ "after": retry_after_func,
+ "reraise": True,
+ }

  @staticmethod
  def _get_tenant_domain(conn: Connection) -> str:
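
As a quick illustration of the new constructor arguments (the connection ID and values below are assumptions, not defaults taken from this diff)::

    from airflow.providers.dbt.cloud.hooks.dbt import DbtCloudHook

    hook = DbtCloudHook(
        dbt_cloud_conn_id="dbt_cloud_default",
        timeout_seconds=30,   # per-request timeout; omitted means no timeout
        retry_limit=3,        # total attempts; must be >= 1
        retry_delay=2.0,      # base value for the exponential backoff wait
    )

    # Synchronous calls now go through run_with_advanced_retry with these retry_args,
    # so transient 429/5xx responses or connection failures are retried before raising.
    accounts = hook.list_accounts()
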
@@ -233,6 +270,36 @@ class DbtCloudHook(HttpHook):
  headers["Authorization"] = f"Token {self.connection.password}"
  return headers, tenant

+ def _log_request_error(self, attempt_num: int, error: str) -> None:
+ self.log.error("Attempt %s API Request to DBT failed with reason: %s", attempt_num, error)
+
+ @staticmethod
+ def _retryable_error(exception: BaseException) -> bool:
+ if isinstance(exception, requests_exceptions.RequestException):
+ if isinstance(exception, (requests_exceptions.ConnectionError, requests_exceptions.Timeout)) or (
+ exception.response is not None
+ and (exception.response.status_code >= 500 or exception.response.status_code == 429)
+ ):
+ return True
+
+ if isinstance(exception, aiohttp.ClientResponseError):
+ if exception.status >= 500 or exception.status == 429:
+ return True
+
+ if isinstance(exception, (aiohttp.ClientConnectorError, TimeoutError)):
+ return True
+
+ return False
+
+ def _a_get_retry_object(self) -> AsyncRetrying:
+ """
+ Instantiate an async retry object.
+
+ :return: instance of AsyncRetrying class
+ """
+ # for compatibility we use reraise to avoid handling request error
+ return AsyncRetrying(**self.retry_args)
+
  @provide_account_id
  async def get_job_details(
  self, run_id: int, account_id: int | None = None, include_related: list[str] | None = None
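
Conceptually, the synchronous path feeds the same ``retry_args`` into ``HttpHook.run_with_advanced_retry`` (shown further below). A rough equivalent using ``tenacity`` directly, with a placeholder request and assumed limits, looks like this::

    import requests
    from tenacity import Retrying, retry_if_exception, stop_after_attempt, wait_exponential

    from airflow.providers.dbt.cloud.hooks.dbt import DbtCloudHook

    retry_args = {
        "stop": stop_after_attempt(3),
        "wait": wait_exponential(min=1.0, max=8.0),
        "retry": retry_if_exception(DbtCloudHook._retryable_error),
        "reraise": True,
    }

    for attempt in Retrying(**retry_args):
        with attempt:
            # Connection errors, timeouts, 429 and 5xx responses are retried;
            # non-retryable errors propagate immediately, and reraise=True re-raises
            # the original exception once the attempts are exhausted.
            response = requests.get("https://cloud.getdbt.com/api/v2/accounts/")  # placeholder URL
            response.raise_for_status()
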
@@ -249,17 +316,22 @@
  headers, tenant = await self.get_headers_tenants_from_connection()
  url, params = self.get_request_url_params(tenant, endpoint, include_related)
  proxies = self._get_proxies(self.connection) or {}
+ proxy = proxies.get("https") if proxies and url.startswith("https") else proxies.get("http")
+ extra_request_args = {}

- async with aiohttp.ClientSession(headers=headers) as session:
- proxy = proxies.get("https") if proxies and url.startswith("https") else proxies.get("http")
- extra_request_args = {}
+ if proxy:
+ extra_request_args["proxy"] = proxy

- if proxy:
- extra_request_args["proxy"] = proxy
+ timeout = (
+ aiohttp.ClientTimeout(total=self.timeout_seconds) if self.timeout_seconds is not None else None
+ )

- async with session.get(url, params=params, **extra_request_args) as response: # type: ignore[arg-type]
- response.raise_for_status()
- return await response.json()
+ async with aiohttp.ClientSession(headers=headers, timeout=timeout) as session:
+ async for attempt in self._a_get_retry_object():
+ with attempt:
+ async with session.get(url, params=params, **extra_request_args) as response: # type: ignore[arg-type]
+ response.raise_for_status()
+ return await response.json()

  async def get_job_status(
  self, run_id: int, account_id: int | None = None, include_related: list[str] | None = None
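
The deferrable trigger normally drives these coroutines, but they can also be exercised directly. A hedged sketch with placeholder connection ID, run ID, and account ID::

    import asyncio

    from airflow.providers.dbt.cloud.hooks.dbt import DbtCloudHook


    async def check_run() -> None:
        hook = DbtCloudHook(dbt_cloud_conn_id="dbt_cloud_default", timeout_seconds=30, retry_limit=3)
        # Each aiohttp GET is wrapped in AsyncRetrying, so transient 429/5xx responses
        # or connection errors are retried before the run status is returned.
        status = await hook.get_job_status(run_id=123456, account_id=12345)
        print(status)


    asyncio.run(check_run())
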
@@ -297,8 +369,14 @@
  def _paginate(
  self, endpoint: str, payload: dict[str, Any] | None = None, proxies: dict[str, str] | None = None
  ) -> list[Response]:
- extra_options = {"proxies": proxies} if proxies is not None else None
- response = self.run(endpoint=endpoint, data=payload, extra_options=extra_options)
+ extra_options: dict[str, Any] = {}
+ if self.timeout_seconds is not None:
+ extra_options["timeout"] = self.timeout_seconds
+ if proxies is not None:
+ extra_options["proxies"] = proxies
+ response = self.run_with_advanced_retry(
+ _retry_args=self.retry_args, endpoint=endpoint, data=payload, extra_options=extra_options or None
+ )
  resp_json = response.json()
  limit = resp_json["extra"]["filters"]["limit"]
  num_total_results = resp_json["extra"]["pagination"]["total_count"]
@@ -309,7 +387,12 @@
  _paginate_payload["offset"] = limit

  while num_current_results < num_total_results:
- response = self.run(endpoint=endpoint, data=_paginate_payload, extra_options=extra_options)
+ response = self.run_with_advanced_retry(
+ _retry_args=self.retry_args,
+ endpoint=endpoint,
+ data=_paginate_payload,
+ extra_options=extra_options,
+ )
  resp_json = response.json()
  results.append(response)
  num_current_results += resp_json["extra"]["pagination"]["count"]
@@ -328,7 +411,11 @@
  self.method = method
  full_endpoint = f"api/{api_version}/accounts/{endpoint}" if endpoint else None
  proxies = self._get_proxies(self.connection)
- extra_options = {"proxies": proxies} if proxies is not None else None
+ extra_options: dict[str, Any] = {}
+ if self.timeout_seconds is not None:
+ extra_options["timeout"] = self.timeout_seconds
+ if proxies is not None:
+ extra_options["proxies"] = proxies

  if paginate:
  if isinstance(payload, str):
@@ -339,7 +426,12 @@

  raise ValueError("An endpoint is needed to paginate a response.")

- return self.run(endpoint=full_endpoint, data=payload, extra_options=extra_options)
+ return self.run_with_advanced_retry(
+ _retry_args=self.retry_args,
+ endpoint=full_endpoint,
+ data=payload,
+ extra_options=extra_options or None,
+ )

  def list_accounts(self) -> list[Response]:
  """