apache-airflow-providers-dbt-cloud 4.4.0__tar.gz → 4.5.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. apache_airflow_providers_dbt_cloud-4.5.0/NOTICE +5 -0
  2. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/PKG-INFO +26 -13
  3. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/README.rst +16 -6
  4. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/docs/changelog.rst +104 -0
  5. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/docs/index.rst +7 -6
  6. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/docs/operators.rst +2 -2
  7. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/provider.yaml +6 -1
  8. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/pyproject.toml +10 -8
  9. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/src/airflow/__init__.py +1 -1
  10. {apache_airflow_providers_dbt_cloud-4.4.0/tests/system → apache_airflow_providers_dbt_cloud-4.5.0/src/airflow/providers}/__init__.py +1 -1
  11. {apache_airflow_providers_dbt_cloud-4.4.0/src/airflow/providers → apache_airflow_providers_dbt_cloud-4.5.0/src/airflow/providers/dbt}/__init__.py +1 -1
  12. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/src/airflow/providers/dbt/cloud/__init__.py +1 -1
  13. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/src/airflow/providers/dbt/cloud/hooks/dbt.py +109 -17
  14. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/src/airflow/providers/dbt/cloud/operators/dbt.py +17 -14
  15. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/src/airflow/providers/dbt/cloud/sensors/dbt.py +5 -3
  16. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/src/airflow/providers/dbt/cloud/triggers/dbt.py +5 -1
  17. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/src/airflow/providers/dbt/cloud/utils/openlineage.py +1 -1
  18. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/src/airflow/providers/dbt/cloud/version_compat.py +4 -0
  19. {apache_airflow_providers_dbt_cloud-4.4.0/src/airflow/providers/dbt → apache_airflow_providers_dbt_cloud-4.5.0/tests/system}/__init__.py +1 -1
  20. apache_airflow_providers_dbt_cloud-4.5.0/tests/system/dbt/__init__.py +17 -0
  21. apache_airflow_providers_dbt_cloud-4.5.0/tests/unit/__init__.py +17 -0
  22. apache_airflow_providers_dbt_cloud-4.5.0/tests/unit/dbt/__init__.py +17 -0
  23. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/unit/dbt/cloud/hooks/test_dbt.py +291 -46
  24. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/unit/dbt/cloud/operators/test_dbt.py +27 -25
  25. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/unit/dbt/cloud/sensors/test_dbt.py +12 -10
  26. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/unit/dbt/cloud/test_data/run_results.json +2 -1
  27. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/unit/dbt/cloud/triggers/test_dbt.py +8 -6
  28. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/unit/dbt/cloud/utils/test_openlineage.py +2 -1
  29. apache_airflow_providers_dbt_cloud-4.4.0/tests/system/dbt/__init__.py +0 -17
  30. apache_airflow_providers_dbt_cloud-4.4.0/tests/unit/__init__.py +0 -17
  31. apache_airflow_providers_dbt_cloud-4.4.0/tests/unit/dbt/__init__.py +0 -17
  32. {apache_airflow_providers_dbt_cloud-4.4.0/src/airflow/providers/dbt/cloud → apache_airflow_providers_dbt_cloud-4.5.0}/LICENSE +0 -0
  33. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/docs/.latest-doc-only-change.txt +0 -0
  34. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/docs/commits.rst +0 -0
  35. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/docs/conf.py +0 -0
  36. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/docs/connections.rst +0 -0
  37. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/docs/installing-providers-from-sources.rst +0 -0
  38. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/docs/integration-logos/dbt.png +0 -0
  39. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/docs/security.rst +0 -0
  40. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/src/airflow/providers/dbt/cloud/get_provider_info.py +0 -0
  41. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/src/airflow/providers/dbt/cloud/hooks/__init__.py +0 -0
  42. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/src/airflow/providers/dbt/cloud/operators/__init__.py +0 -0
  43. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/src/airflow/providers/dbt/cloud/sensors/__init__.py +0 -0
  44. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/src/airflow/providers/dbt/cloud/triggers/__init__.py +0 -0
  45. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/src/airflow/providers/dbt/cloud/utils/__init__.py +0 -0
  46. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/conftest.py +0 -0
  47. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/system/dbt/cloud/__init__.py +0 -0
  48. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/system/dbt/cloud/example_dbt_cloud.py +0 -0
  49. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/unit/dbt/cloud/__init__.py +0 -0
  50. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/unit/dbt/cloud/hooks/__init__.py +0 -0
  51. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/unit/dbt/cloud/operators/__init__.py +0 -0
  52. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/unit/dbt/cloud/sensors/__init__.py +0 -0
  53. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/unit/dbt/cloud/test_data/__init__.py +0 -0
  54. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/unit/dbt/cloud/test_data/catalog.json +0 -0
  55. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/unit/dbt/cloud/test_data/job_run.json +0 -0
  56. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/unit/dbt/cloud/test_data/manifest.json +0 -0
  57. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/unit/dbt/cloud/triggers/__init__.py +0 -0
  58. {apache_airflow_providers_dbt_cloud-4.4.0 → apache_airflow_providers_dbt_cloud-4.5.0}/tests/unit/dbt/cloud/utils/__init__.py +0 -0
@@ -0,0 +1,5 @@
+ Apache Airflow
+ Copyright 2016-2025 The Apache Software Foundation
+
+ This product includes software developed at
+ The Apache Software Foundation (http://www.apache.org/).
@@ -1,12 +1,13 @@
  Metadata-Version: 2.4
  Name: apache-airflow-providers-dbt-cloud
- Version: 4.4.0
+ Version: 4.5.0
  Summary: Provider package apache-airflow-providers-dbt-cloud for Apache Airflow
  Keywords: airflow-provider,dbt.cloud,airflow,integration
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
  Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
- Requires-Python: ~=3.9
+ Requires-Python: >=3.10
  Description-Content-Type: text/x-rst
+ License-Expression: Apache-2.0
  Classifier: Development Status :: 5 - Production/Stable
  Classifier: Environment :: Console
  Classifier: Environment :: Web Environment
@@ -14,21 +15,23 @@ Classifier: Intended Audience :: Developers
  Classifier: Intended Audience :: System Administrators
  Classifier: Framework :: Apache Airflow
  Classifier: Framework :: Apache Airflow :: Provider
- Classifier: License :: OSI Approved :: Apache Software License
- Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
+ Classifier: Programming Language :: Python :: 3.13
  Classifier: Topic :: System :: Monitoring
+ License-File: LICENSE
+ License-File: NOTICE
  Requires-Dist: apache-airflow>=2.10.0
- Requires-Dist: apache-airflow-providers-common-compat>=1.6.0
+ Requires-Dist: apache-airflow-providers-common-compat>=1.8.0
  Requires-Dist: apache-airflow-providers-http
  Requires-Dist: asgiref>=2.3.0
  Requires-Dist: aiohttp>=3.9.2
+ Requires-Dist: tenacity>=8.3.0
  Requires-Dist: apache-airflow-providers-openlineage>=2.3.0 ; extra == "openlineage"
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
- Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.0/changelog.html
- Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.0
+ Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.5.0/changelog.html
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.5.0
  Project-URL: Mastodon, https://fosstodon.org/@airflow
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
  Project-URL: Source Code, https://github.com/apache/airflow
@@ -60,7 +63,7 @@ Provides-Extra: openlineage

  Package ``apache-airflow-providers-dbt-cloud``

- Release: ``4.4.0``
+ Release: ``4.5.0``


  `dbt Cloud <https://www.getdbt.com/product/dbt-cloud/>`__
@@ -73,16 +76,16 @@ This is a provider package for ``dbt.cloud`` provider. All classes for this prov
  are in ``airflow.providers.dbt.cloud`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.0/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.5.0/>`_.

  Installation
  ------------

- You can install this package on top of an existing Airflow 2 installation (see ``Requirements`` below
+ You can install this package on top of an existing Airflow installation (see ``Requirements`` below
  for the minimum Airflow version supported) via
  ``pip install apache-airflow-providers-dbt-cloud``

- The package supports the following python versions: 3.9,3.10,3.11,3.12
+ The package supports the following python versions: 3.10,3.11,3.12,3.13

  Requirements
  ------------
@@ -91,10 +94,11 @@ Requirements
  PIP package Version required
  ========================================== ==================
  ``apache-airflow`` ``>=2.10.0``
- ``apache-airflow-providers-common-compat`` ``>=1.6.0``
+ ``apache-airflow-providers-common-compat`` ``>=1.8.0``
  ``apache-airflow-providers-http``
  ``asgiref`` ``>=2.3.0``
  ``aiohttp`` ``>=3.9.2``
+ ``tenacity`` ``>=8.3.0``
  ========================================== ==================

  Cross provider package dependencies
@@ -118,6 +122,15 @@ Dependent package
  `apache-airflow-providers-openlineage <https://airflow.apache.org/docs/apache-airflow-providers-openlineage>`_ ``openlineage``
  ================================================================================================================== =================

+ Optional dependencies
+ ----------------------
+
+ =============== ===============================================
+ Extra Dependencies
+ =============== ===============================================
+ ``openlineage`` ``apache-airflow-providers-openlineage>=2.3.0``
+ =============== ===============================================
+
  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.0/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.5.0/changelog.html>`_.

@@ -23,7 +23,7 @@

  Package ``apache-airflow-providers-dbt-cloud``

- Release: ``4.4.0``
+ Release: ``4.5.0``


  `dbt Cloud <https://www.getdbt.com/product/dbt-cloud/>`__
@@ -36,16 +36,16 @@ This is a provider package for ``dbt.cloud`` provider. All classes for this prov
  are in ``airflow.providers.dbt.cloud`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.0/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.5.0/>`_.

  Installation
  ------------

- You can install this package on top of an existing Airflow 2 installation (see ``Requirements`` below
+ You can install this package on top of an existing Airflow installation (see ``Requirements`` below
  for the minimum Airflow version supported) via
  ``pip install apache-airflow-providers-dbt-cloud``

- The package supports the following python versions: 3.9,3.10,3.11,3.12
+ The package supports the following python versions: 3.10,3.11,3.12,3.13

  Requirements
  ------------
@@ -54,10 +54,11 @@ Requirements
  PIP package Version required
  ========================================== ==================
  ``apache-airflow`` ``>=2.10.0``
- ``apache-airflow-providers-common-compat`` ``>=1.6.0``
+ ``apache-airflow-providers-common-compat`` ``>=1.8.0``
  ``apache-airflow-providers-http``
  ``asgiref`` ``>=2.3.0``
  ``aiohttp`` ``>=3.9.2``
+ ``tenacity`` ``>=8.3.0``
  ========================================== ==================

  Cross provider package dependencies
@@ -81,5 +82,14 @@ Dependent package
  `apache-airflow-providers-openlineage <https://airflow.apache.org/docs/apache-airflow-providers-openlineage>`_ ``openlineage``
  ================================================================================================================== =================

+ Optional dependencies
+ ----------------------
+
+ =============== ===============================================
+ Extra Dependencies
+ =============== ===============================================
+ ``openlineage`` ``apache-airflow-providers-openlineage>=2.3.0``
+ =============== ===============================================
+
  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.0/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.5.0/changelog.html>`_.
@@ -28,6 +28,110 @@
  Changelog
  ---------

+ 4.5.0
+ .....
+
+ Features
+ ~~~~~~~~
+
+ * ``Fixes inconsistency where other dbt operators already supported hook_params (#57242)``
+
+ Misc
+ ~~~~
+
+ * ``Convert all airflow distributions to be compliant with ASF requirements (#58138)``
+
+ .. Below changes are excluded from the changelog. Move them to
+ appropriate section above if needed. Do not delete the lines(!):
+ * ``Delete all unnecessary LICENSE Files (#58191)``
+ * ``Enable PT006 rule to 14 files in providers (databricks,dbt,docker) (#57994)``
+
+ 4.4.4
+ .....
+
+ Bug Fixes
+ ~~~~~~~~~
+
+ * ``Add retry mechanism and error handling to DBT Hook (#56651)``
+
+ Misc
+ ~~~~
+
+ * ``Migrate dbt.cloud provider to ''common.compat'' (#56999)``
+
+ Doc-only
+ ~~~~~~~~
+
+ * ``Remove placeholder Release Date in changelog and index files (#56056)``
+
+ .. Below changes are excluded from the changelog. Move them to
+ appropriate section above if needed. Do not delete the lines(!):
+ * ``Enable PT011 rule to prvoider tests (#56320)``
+ * ``nit: Bump required OL client for Openlineage provider (#56302)``
+
+ 4.4.3
+ .....
+
+
+ Bug Fixes
+ ~~~~~~~~~
+
+ * ``Change operator DbtCloudRunJobOperator to send job_run_id to XCom (#55184)``
+
+ Doc-only
+ ~~~~~~~~
+
+ * ``Make term Dag consistent in providers docs (#55101)``
+
+ .. Below changes are excluded from the changelog. Move them to
+ appropriate section above if needed. Do not delete the lines(!):
+ * ``Switch pre-commit to prek (#54258)``
+
+ .. Review and move the new changes to one of the sections above:
+ * ``Fix Airflow 2 reference in README/index of providers (#55240)``
+
+ 4.4.2
+ .....
+
+ Misc
+ ~~~~
+
+ * ``Add Python 3.13 support for Airflow. (#46891)``
+ * ``Cleanup mypy ignore in dbt provider where possible (#53270)``
+ * ``Remove type ignore across codebase after mypy upgrade (#53243)``
+ * ``Remove upper-binding for "python-requires" (#52980)``
+ * ``Temporarily switch to use >=,< pattern instead of '~=' (#52967)``
+
+ .. Below changes are excluded from the changelog. Move them to
+ appropriate section above if needed. Do not delete the lines(!):
+ * ``mocking definition order FIX (#52905)``
+
+ 4.4.1
+ .....
+
+ Bug Fixes
+ ~~~~~~~~~
+
+ * ``Converting int account IDs to str in DBT Cloud connections (#51957)``
+
+ Misc
+ ~~~~
+
+ * ``Move 'BaseHook' implementation to task SDK (#51873)``
+ * ``Disable UP038 ruff rule and revert mandatory 'X | Y' in insintance checks (#52644)``
+ * ``Replace 'models.BaseOperator' to Task SDK one for DBT & Databricks (#52377)``
+ * ``Drop support for Python 3.9 (#52072)``
+ * ``Use BaseSensorOperator from task sdk in providers (#52296)``
+ * ``Add deprecation to 'airflow/sensors/base.py' (#52249)``
+ * ``Adding 'invocation_id' to run-results as expected by Openlineage (#51916)``
+
+ .. Below changes are excluded from the changelog. Move them to
+ appropriate section above if needed. Do not delete the lines(!):
+ * ``Make sure all test version imports come from test_common (#52425)``
+ * ``removed usage of pytest.mark.db_test from dbt tests (#52031)``
+ * ``Introducing fixture to create 'Connections' without DB in provider tests (#51930)``
+ * ``Switch the Supervisor/task process from line-based to length-prefixed (#51699)``
+
  4.4.0
  .....

@@ -60,7 +60,7 @@ an Integrated Developer Environment (IDE).
  :maxdepth: 1
  :caption: Resources

- Example DAGs <https://github.com/apache/airflow/tree/providers-dbt-cloud/|version|/providers/dbt/tests/system/dbt/cloud/example_dbt_cloud.py>
+ Example Dags <https://github.com/apache/airflow/tree/providers-dbt-cloud/|version|/providers/dbt/tests/system/dbt/cloud/example_dbt_cloud.py>
  PyPI Repository <https://pypi.org/project/apache-airflow-providers-dbt-cloud/>
  Installing from sources <installing-providers-from-sources>

@@ -81,7 +81,7 @@ apache-airflow-providers-dbt-cloud package
  `dbt Cloud <https://www.getdbt.com/product/dbt-cloud/>`__


- Release: 4.4.0
+ Release: 4.5.0

  Provider package
  ----------------
@@ -92,7 +92,7 @@ All classes for this package are included in the ``airflow.providers.dbt.cloud``
  Installation
  ------------

- You can install this package on top of an existing Airflow 2 installation via
+ You can install this package on top of an existing Airflow installation via
  ``pip install apache-airflow-providers-dbt-cloud``.
  For the minimum Airflow version supported, see ``Requirements`` below.

@@ -105,10 +105,11 @@ The minimum Apache Airflow version supported by this provider distribution is ``
  PIP package Version required
  ========================================== ==================
  ``apache-airflow`` ``>=2.10.0``
- ``apache-airflow-providers-common-compat`` ``>=1.6.0``
+ ``apache-airflow-providers-common-compat`` ``>=1.8.0``
  ``apache-airflow-providers-http``
  ``asgiref`` ``>=2.3.0``
  ``aiohttp`` ``>=3.9.2``
+ ``tenacity`` ``>=8.3.0``
  ========================================== ==================

  Cross provider package dependencies
@@ -138,5 +139,5 @@ Downloading official packages
  You can download officially released packages and verify their checksums and signatures from the
  `Official Apache Download site <https://downloads.apache.org/airflow/providers/>`_

- * `The apache-airflow-providers-dbt-cloud 4.4.0 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.0.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.0.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.0.tar.gz.sha512>`__)
- * `The apache-airflow-providers-dbt-cloud 4.4.0 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.0-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.0-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.0-py3-none-any.whl.sha512>`__)
+ * `The apache-airflow-providers-dbt-cloud 4.5.0 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.5.0.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.5.0.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.5.0.tar.gz.sha512>`__)
+ * `The apache-airflow-providers-dbt-cloud 4.5.0 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.5.0-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.5.0-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.5.0-py3-none-any.whl.sha512>`__)
@@ -65,7 +65,7 @@ configurations or overrides for the job run such as ``threads_override``, ``gene

  The below examples demonstrate how to instantiate DbtCloudRunJobOperator tasks with both synchronous and
  asynchronous waiting for run termination, respectively. To note, the ``account_id`` for the operators is
- referenced within the ``default_args`` of the example DAG.
+ referenced within the ``default_args`` of the example Dag.

  .. exampleinclude:: /../tests/system/dbt/cloud/example_dbt_cloud.py
  :language: python
@@ -104,7 +104,7 @@ functionality available with the :class:`~airflow.sensors.base.BaseSensorOperato

  In the example below, the ``run_id`` value in the example below comes from the output of a previous
  DbtCloudRunJobOperator task by utilizing the ``.output`` property exposed for all operators. Also, to note,
- the ``account_id`` for the task is referenced within the ``default_args`` of the example DAG.
+ the ``account_id`` for the task is referenced within the ``default_args`` of the example Dag.

  .. exampleinclude:: /../tests/system/dbt/cloud/example_dbt_cloud.py
  :language: python
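
For orientation, the trigger-then-wait pattern described in the documentation hunks above looks roughly like the following. This is a minimal, hypothetical sketch and not the provider's shipped example (that lives in tests/system/dbt/cloud/example_dbt_cloud.py); the connection ID, account ID, and job ID are placeholders.

from datetime import datetime

from airflow import DAG
from airflow.providers.dbt.cloud.operators.dbt import DbtCloudRunJobOperator
from airflow.providers.dbt.cloud.sensors.dbt import DbtCloudJobRunSensor

with DAG(
    dag_id="dbt_cloud_sketch",
    start_date=datetime(2025, 1, 1),
    schedule=None,
    # account_id and the connection are shared by all dbt Cloud tasks via default_args
    default_args={"dbt_cloud_conn_id": "dbt_cloud_default", "account_id": 12345},
):
    # Trigger the job without blocking the worker; the sensor below does the waiting.
    trigger_job = DbtCloudRunJobOperator(
        task_id="trigger_dbt_cloud_job",
        job_id=67890,  # placeholder dbt Cloud job ID
        wait_for_termination=False,
    )

    # The sensor reads the run ID from the operator's ``.output`` XCom reference.
    wait_for_job = DbtCloudJobRunSensor(
        task_id="wait_for_dbt_cloud_job",
        run_id=trigger_job.output,
        timeout=3600,
    )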
@@ -22,12 +22,17 @@ description: |
  `dbt Cloud <https://www.getdbt.com/product/dbt-cloud/>`__

  state: ready
- source-date-epoch: 1747132505
+ source-date-epoch: 1763069126
  # Note that those versions are maintained by release manager - do not update them manually
  # with the exception of case where other provider in sources has >= new provider version.
  # In such case adding >= NEW_VERSION and bumping to NEW_VERSION in a provider have
  # to be done in the same PR
  versions:
+ - 4.5.0
+ - 4.4.4
+ - 4.4.3
+ - 4.4.2
+ - 4.4.1
  - 4.4.0
  - 4.3.3
  - 4.3.2
@@ -25,9 +25,11 @@ build-backend = "flit_core.buildapi"

  [project]
  name = "apache-airflow-providers-dbt-cloud"
- version = "4.4.0"
+ version = "4.5.0"
  description = "Provider package apache-airflow-providers-dbt-cloud for Apache Airflow"
  readme = "README.rst"
+ license = "Apache-2.0"
+ license-files = ['LICENSE', 'NOTICE']
  authors = [
  {name="Apache Software Foundation", email="dev@airflow.apache.org"},
  ]
@@ -43,25 +45,25 @@ classifiers = [
  "Intended Audience :: System Administrators",
  "Framework :: Apache Airflow",
  "Framework :: Apache Airflow :: Provider",
- "License :: OSI Approved :: Apache Software License",
- "Programming Language :: Python :: 3.9",
  "Programming Language :: Python :: 3.10",
  "Programming Language :: Python :: 3.11",
  "Programming Language :: Python :: 3.12",
+ "Programming Language :: Python :: 3.13",
  "Topic :: System :: Monitoring",
  ]
- requires-python = "~=3.9"
+ requires-python = ">=3.10"

  # The dependencies should be modified in place in the generated file.
  # Any change in the dependencies is preserved when the file is regenerated
- # Make sure to run ``breeze static-checks --type update-providers-dependencies --all-files``
+ # Make sure to run ``prek update-providers-dependencies --all-files``
  # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
  dependencies = [
  "apache-airflow>=2.10.0",
- "apache-airflow-providers-common-compat>=1.6.0",
+ "apache-airflow-providers-common-compat>=1.8.0",
  "apache-airflow-providers-http",
  "asgiref>=2.3.0",
  "aiohttp>=3.9.2",
+ "tenacity>=8.3.0",
  ]

  # The optional dependencies should be modified in place in the generated file
@@ -109,8 +111,8 @@ apache-airflow-providers-common-sql = {workspace = true}
  apache-airflow-providers-standard = {workspace = true}

  [project.urls]
- "Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.0"
- "Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.0/changelog.html"
+ "Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.5.0"
+ "Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.5.0/changelog.html"
  "Bug Tracker" = "https://github.com/apache/airflow/issues"
  "Source Code" = "https://github.com/apache/airflow"
  "Slack Chat" = "https://s.apache.org/airflow-slack"
@@ -14,4 +14,4 @@
  # KIND, either express or implied. See the License for the
  # specific language governing permissions and limitations
  # under the License.
- __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
+ __path__ = __import__("pkgutil").extend_path(__path__, __name__)
@@ -14,4 +14,4 @@
  # KIND, either express or implied. See the License for the
  # specific language governing permissions and limitations
  # under the License.
- __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
+ __path__ = __import__("pkgutil").extend_path(__path__, __name__)
@@ -14,4 +14,4 @@
  # KIND, either express or implied. See the License for the
  # specific language governing permissions and limitations
  # under the License.
- __path__ = __import__("pkgutil").extend_path(__path__, __name__) # type: ignore
+ __path__ = __import__("pkgutil").extend_path(__path__, __name__)
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version

  __all__ = ["__version__"]

- __version__ = "4.4.0"
+ __version__ = "4.5.0"

  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
  "2.10.0"
@@ -17,19 +17,22 @@
  from __future__ import annotations

  import asyncio
+ import copy
  import json
  import time
  import warnings
- from collections.abc import Sequence
+ from collections.abc import Callable, Sequence
  from enum import Enum
  from functools import cached_property, wraps
  from inspect import signature
- from typing import TYPE_CHECKING, Any, Callable, TypedDict, TypeVar, cast
+ from typing import TYPE_CHECKING, Any, TypedDict, TypeVar, cast

  import aiohttp
  from asgiref.sync import sync_to_async
+ from requests import exceptions as requests_exceptions
  from requests.auth import AuthBase
  from requests.sessions import Session
+ from tenacity import AsyncRetrying, RetryCallState, retry_if_exception, stop_after_attempt, wait_exponential

  from airflow.exceptions import AirflowException
  from airflow.providers.http.hooks.http import HttpHook
@@ -174,6 +177,10 @@ class DbtCloudHook(HttpHook):
  Interact with dbt Cloud using the V2 (V3 if supported) API.

  :param dbt_cloud_conn_id: The ID of the :ref:`dbt Cloud connection <howto/connection:dbt-cloud>`.
+ :param timeout_seconds: Optional. The timeout in seconds for HTTP requests. If not provided, no timeout is applied.
+ :param retry_limit: The number of times to retry a request in case of failure.
+ :param retry_delay: The delay in seconds between retries.
+ :param retry_args: A dictionary of arguments to pass to the `tenacity.retry` decorator.
  """

  conn_name_attr = "dbt_cloud_conn_id"
@@ -193,9 +200,39 @@ class DbtCloudHook(HttpHook):
  },
  }

- def __init__(self, dbt_cloud_conn_id: str = default_conn_name, *args, **kwargs) -> None:
+ def __init__(
+ self,
+ dbt_cloud_conn_id: str = default_conn_name,
+ timeout_seconds: int | None = None,
+ retry_limit: int = 1,
+ retry_delay: float = 1.0,
+ retry_args: dict[Any, Any] | None = None,
+ ) -> None:
  super().__init__(auth_type=TokenAuth)
  self.dbt_cloud_conn_id = dbt_cloud_conn_id
+ self.timeout_seconds = timeout_seconds
+ if retry_limit < 1:
+ raise ValueError("Retry limit must be greater than or equal to 1")
+ self.retry_limit = retry_limit
+ self.retry_delay = retry_delay
+
+ def retry_after_func(retry_state: RetryCallState) -> None:
+ error_msg = str(retry_state.outcome.exception()) if retry_state.outcome else "Unknown error"
+ self._log_request_error(retry_state.attempt_number, error_msg)
+
+ if retry_args:
+ self.retry_args = copy.copy(retry_args)
+ self.retry_args["retry"] = retry_if_exception(self._retryable_error)
+ self.retry_args["after"] = retry_after_func
+ self.retry_args["reraise"] = True
+ else:
+ self.retry_args = {
+ "stop": stop_after_attempt(self.retry_limit),
+ "wait": wait_exponential(min=self.retry_delay, max=(2**retry_limit)),
+ "retry": retry_if_exception(self._retryable_error),
+ "after": retry_after_func,
+ "reraise": True,
+ }

  @staticmethod
  def _get_tenant_domain(conn: Connection) -> str:
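
As a usage illustration of the constructor shown in this hunk (not part of the diff itself): the connection ID and values below are placeholders, and ``retry_args`` follows tenacity's keyword arguments, with the hook overriding the ``retry``, ``after`` and ``reraise`` keys as seen above.

from tenacity import stop_after_attempt, wait_fixed

from airflow.providers.dbt.cloud.hooks.dbt import DbtCloudHook

# Simple form: bounded attempts with exponential backoff and a per-request timeout.
hook = DbtCloudHook(
    dbt_cloud_conn_id="dbt_cloud_default",  # placeholder connection ID
    timeout_seconds=30,
    retry_limit=5,
    retry_delay=2.0,
)

# Custom form: supply a tenacity configuration; the hook still injects its own
# "retry", "after" and "reraise" entries on top of what is passed in.
custom_hook = DbtCloudHook(
    dbt_cloud_conn_id="dbt_cloud_default",
    retry_args={"stop": stop_after_attempt(3), "wait": wait_fixed(5)},
)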
@@ -233,6 +270,36 @@ class DbtCloudHook(HttpHook):
  headers["Authorization"] = f"Token {self.connection.password}"
  return headers, tenant

+ def _log_request_error(self, attempt_num: int, error: str) -> None:
+ self.log.error("Attempt %s API Request to DBT failed with reason: %s", attempt_num, error)
+
+ @staticmethod
+ def _retryable_error(exception: BaseException) -> bool:
+ if isinstance(exception, requests_exceptions.RequestException):
+ if isinstance(exception, (requests_exceptions.ConnectionError, requests_exceptions.Timeout)) or (
+ exception.response is not None
+ and (exception.response.status_code >= 500 or exception.response.status_code == 429)
+ ):
+ return True
+
+ if isinstance(exception, aiohttp.ClientResponseError):
+ if exception.status >= 500 or exception.status == 429:
+ return True
+
+ if isinstance(exception, (aiohttp.ClientConnectorError, TimeoutError)):
+ return True
+
+ return False
+
+ def _a_get_retry_object(self) -> AsyncRetrying:
+ """
+ Instantiate an async retry object.
+
+ :return: instance of AsyncRetrying class
+ """
+ # for compatibility we use reraise to avoid handling request error
+ return AsyncRetrying(**self.retry_args)
+
  @provide_account_id
  async def get_job_details(
  self, run_id: int, account_id: int | None = None, include_related: list[str] | None = None
@@ -249,17 +316,22 @@
  headers, tenant = await self.get_headers_tenants_from_connection()
  url, params = self.get_request_url_params(tenant, endpoint, include_related)
  proxies = self._get_proxies(self.connection) or {}
+ proxy = proxies.get("https") if proxies and url.startswith("https") else proxies.get("http")
+ extra_request_args = {}

- async with aiohttp.ClientSession(headers=headers) as session:
- proxy = proxies.get("https") if proxies and url.startswith("https") else proxies.get("http")
- extra_request_args = {}
+ if proxy:
+ extra_request_args["proxy"] = proxy

- if proxy:
- extra_request_args["proxy"] = proxy
+ timeout = (
+ aiohttp.ClientTimeout(total=self.timeout_seconds) if self.timeout_seconds is not None else None
+ )

- async with session.get(url, params=params, **extra_request_args) as response: # type: ignore[arg-type]
- response.raise_for_status()
- return await response.json()
+ async with aiohttp.ClientSession(headers=headers, timeout=timeout) as session:
+ async for attempt in self._a_get_retry_object():
+ with attempt:
+ async with session.get(url, params=params, **extra_request_args) as response: # type: ignore[arg-type]
+ response.raise_for_status()
+ return await response.json()

  async def get_job_status(
  self, run_id: int, account_id: int | None = None, include_related: list[str] | None = None
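
The async request path above relies on tenacity's AsyncRetrying loop. The self-contained sketch below is not from the package; it just shows the same loop shape under simplified assumptions: failed attempts re-enter the loop until the stop condition is reached, and reraise=True surfaces the last exception instead of tenacity's RetryError.

import asyncio

from tenacity import AsyncRetrying, retry_if_exception_type, stop_after_attempt, wait_exponential

calls = {"count": 0}


async def flaky_call() -> str:
    # Stand-in for the session.get(...) request in get_job_details: fails twice, then succeeds.
    calls["count"] += 1
    if calls["count"] < 3:
        raise ConnectionError("transient failure")
    return "ok"


async def fetch_with_retries() -> str:
    async for attempt in AsyncRetrying(
        stop=stop_after_attempt(3),
        wait=wait_exponential(min=0.1),
        retry=retry_if_exception_type(ConnectionError),
        reraise=True,
    ):
        with attempt:
            return await flaky_call()
    raise RuntimeError("unreachable: the retry loop either returns or re-raises")


print(asyncio.run(fetch_with_retries()))  # "ok" on the third attempt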
@@ -283,7 +355,7 @@
  if not _connection.password:
  raise AirflowException("An API token is required to connect to dbt Cloud.")

- return _connection
+ return _connection # type: ignore[return-value]

  def get_conn(self, *args, **kwargs) -> Session:
  tenant = self._get_tenant_domain(self.connection)
@@ -297,8 +369,14 @@
  def _paginate(
  self, endpoint: str, payload: dict[str, Any] | None = None, proxies: dict[str, str] | None = None
  ) -> list[Response]:
- extra_options = {"proxies": proxies} if proxies is not None else None
- response = self.run(endpoint=endpoint, data=payload, extra_options=extra_options)
+ extra_options: dict[str, Any] = {}
+ if self.timeout_seconds is not None:
+ extra_options["timeout"] = self.timeout_seconds
+ if proxies is not None:
+ extra_options["proxies"] = proxies
+ response = self.run_with_advanced_retry(
+ _retry_args=self.retry_args, endpoint=endpoint, data=payload, extra_options=extra_options or None
+ )
  resp_json = response.json()
  limit = resp_json["extra"]["filters"]["limit"]
  num_total_results = resp_json["extra"]["pagination"]["total_count"]
@@ -309,7 +387,12 @@
  _paginate_payload["offset"] = limit

  while num_current_results < num_total_results:
- response = self.run(endpoint=endpoint, data=_paginate_payload, extra_options=extra_options)
+ response = self.run_with_advanced_retry(
+ _retry_args=self.retry_args,
+ endpoint=endpoint,
+ data=_paginate_payload,
+ extra_options=extra_options,
+ )
  resp_json = response.json()
  results.append(response)
  num_current_results += resp_json["extra"]["pagination"]["count"]
@@ -328,7 +411,11 @@
  self.method = method
  full_endpoint = f"api/{api_version}/accounts/{endpoint}" if endpoint else None
  proxies = self._get_proxies(self.connection)
- extra_options = {"proxies": proxies} if proxies is not None else None
+ extra_options: dict[str, Any] = {}
+ if self.timeout_seconds is not None:
+ extra_options["timeout"] = self.timeout_seconds
+ if proxies is not None:
+ extra_options["proxies"] = proxies

  if paginate:
  if isinstance(payload, str):
@@ -339,7 +426,12 @@

  raise ValueError("An endpoint is needed to paginate a response.")

- return self.run(endpoint=full_endpoint, data=payload, extra_options=extra_options)
+ return self.run_with_advanced_retry(
+ _retry_args=self.retry_args,
+ endpoint=full_endpoint,
+ data=payload,
+ extra_options=extra_options or None,
+ )

  def list_accounts(self) -> list[Response]:
  """