apache-airflow-providers-dbt-cloud 4.4.3.tar.gz → 4.4.4rc1.tar.gz

This diff shows the changes between two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release: this version of apache-airflow-providers-dbt-cloud might be problematic.

Files changed (54)
  1. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/PKG-INFO +21 -10
  2. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/README.rst +14 -4
  3. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/docs/changelog.rst +23 -2
  4. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/docs/index.rst +5 -4
  5. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/provider.yaml +2 -1
  6. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/pyproject.toml +7 -6
  7. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/src/airflow/providers/dbt/cloud/__init__.py +1 -1
  8. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/src/airflow/providers/dbt/cloud/hooks/dbt.py +106 -14
  9. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/src/airflow/providers/dbt/cloud/operators/dbt.py +13 -8
  10. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/src/airflow/providers/dbt/cloud/sensors/dbt.py +1 -6
  11. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/src/airflow/providers/dbt/cloud/triggers/dbt.py +5 -1
  12. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/src/airflow/providers/dbt/cloud/version_compat.py +0 -12
  13. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/tests/unit/dbt/cloud/hooks/test_dbt.py +249 -4
  14. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/tests/unit/dbt/cloud/operators/test_dbt.py +1 -1
  15. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/tests/unit/dbt/cloud/triggers/test_dbt.py +2 -0
  16. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/tests/unit/dbt/cloud/utils/test_openlineage.py +1 -0
  17. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/docs/.latest-doc-only-change.txt +0 -0
  18. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/docs/commits.rst +0 -0
  19. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/docs/conf.py +0 -0
  20. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/docs/connections.rst +0 -0
  21. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/docs/installing-providers-from-sources.rst +0 -0
  22. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/docs/integration-logos/dbt.png +0 -0
  23. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/docs/operators.rst +0 -0
  24. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/docs/security.rst +0 -0
  25. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/src/airflow/__init__.py +0 -0
  26. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/src/airflow/providers/__init__.py +0 -0
  27. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/src/airflow/providers/dbt/__init__.py +0 -0
  28. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/src/airflow/providers/dbt/cloud/LICENSE +0 -0
  29. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/src/airflow/providers/dbt/cloud/get_provider_info.py +0 -0
  30. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/src/airflow/providers/dbt/cloud/hooks/__init__.py +0 -0
  31. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/src/airflow/providers/dbt/cloud/operators/__init__.py +0 -0
  32. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/src/airflow/providers/dbt/cloud/sensors/__init__.py +0 -0
  33. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/src/airflow/providers/dbt/cloud/triggers/__init__.py +0 -0
  34. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/src/airflow/providers/dbt/cloud/utils/__init__.py +0 -0
  35. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/src/airflow/providers/dbt/cloud/utils/openlineage.py +0 -0
  36. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/tests/conftest.py +0 -0
  37. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/tests/system/__init__.py +0 -0
  38. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/tests/system/dbt/__init__.py +0 -0
  39. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/tests/system/dbt/cloud/__init__.py +0 -0
  40. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/tests/system/dbt/cloud/example_dbt_cloud.py +0 -0
  41. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/tests/unit/__init__.py +0 -0
  42. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/tests/unit/dbt/__init__.py +0 -0
  43. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/tests/unit/dbt/cloud/__init__.py +0 -0
  44. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/tests/unit/dbt/cloud/hooks/__init__.py +0 -0
  45. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/tests/unit/dbt/cloud/operators/__init__.py +0 -0
  46. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/tests/unit/dbt/cloud/sensors/__init__.py +0 -0
  47. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/tests/unit/dbt/cloud/sensors/test_dbt.py +0 -0
  48. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/tests/unit/dbt/cloud/test_data/__init__.py +0 -0
  49. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/tests/unit/dbt/cloud/test_data/catalog.json +0 -0
  50. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/tests/unit/dbt/cloud/test_data/job_run.json +0 -0
  51. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/tests/unit/dbt/cloud/test_data/manifest.json +0 -0
  52. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/tests/unit/dbt/cloud/test_data/run_results.json +0 -0
  53. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/tests/unit/dbt/cloud/triggers/__init__.py +0 -0
  54. {apache_airflow_providers_dbt_cloud-4.4.3 → apache_airflow_providers_dbt_cloud-4.4.4rc1}/tests/unit/dbt/cloud/utils/__init__.py +0 -0
--- apache_airflow_providers_dbt_cloud-4.4.3/PKG-INFO
+++ apache_airflow_providers_dbt_cloud-4.4.4rc1/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: apache-airflow-providers-dbt-cloud
-Version: 4.4.3
+Version: 4.4.4rc1
 Summary: Provider package apache-airflow-providers-dbt-cloud for Apache Airflow
 Keywords: airflow-provider,dbt.cloud,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,15 +20,16 @@ Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: System :: Monitoring
-Requires-Dist: apache-airflow>=2.10.0
-Requires-Dist: apache-airflow-providers-common-compat>=1.6.0
+Requires-Dist: apache-airflow>=2.10.0rc1
+Requires-Dist: apache-airflow-providers-common-compat>=1.8.0rc1
 Requires-Dist: apache-airflow-providers-http
 Requires-Dist: asgiref>=2.3.0
 Requires-Dist: aiohttp>=3.9.2
-Requires-Dist: apache-airflow-providers-openlineage>=2.3.0 ; extra == "openlineage"
+Requires-Dist: tenacity>=8.3.0
+Requires-Dist: apache-airflow-providers-openlineage>=2.3.0rc1 ; extra == "openlineage"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.3/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.3
+Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.4/changelog.html
+Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.4
 Project-URL: Mastodon, https://fosstodon.org/@airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
@@ -60,7 +61,7 @@ Provides-Extra: openlineage
 
 Package ``apache-airflow-providers-dbt-cloud``
 
-Release: ``4.4.3``
+Release: ``4.4.4``
 
 
 `dbt Cloud <https://www.getdbt.com/product/dbt-cloud/>`__
@@ -73,7 +74,7 @@ This is a provider package for ``dbt.cloud`` provider. All classes for this prov
 are in ``airflow.providers.dbt.cloud`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.3/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.4/>`_.
 
 Installation
 ------------
@@ -91,10 +92,11 @@ Requirements
 PIP package                                Version required
 ========================================== ==================
 ``apache-airflow``                         ``>=2.10.0``
-``apache-airflow-providers-common-compat`` ``>=1.6.0``
+``apache-airflow-providers-common-compat`` ``>=1.8.0``
 ``apache-airflow-providers-http``
 ``asgiref``                                ``>=2.3.0``
 ``aiohttp``                                ``>=3.9.2``
+``tenacity``                               ``>=8.3.0``
 ========================================== ==================
 
 Cross provider package dependencies
@@ -118,6 +120,15 @@ Dependent package
 `apache-airflow-providers-openlineage <https://airflow.apache.org/docs/apache-airflow-providers-openlineage>`_ ``openlineage``
 ================================================================================================================== =================
 
+Optional dependencies
+----------------------
+
+=============== ===============================================
+Extra           Dependencies
+=============== ===============================================
+``openlineage`` ``apache-airflow-providers-openlineage>=2.3.0``
+=============== ===============================================
+
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.3/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.4/changelog.html>`_.
 
--- apache_airflow_providers_dbt_cloud-4.4.3/README.rst
+++ apache_airflow_providers_dbt_cloud-4.4.4rc1/README.rst
@@ -23,7 +23,7 @@
 
 Package ``apache-airflow-providers-dbt-cloud``
 
-Release: ``4.4.3``
+Release: ``4.4.4``
 
 
 `dbt Cloud <https://www.getdbt.com/product/dbt-cloud/>`__
@@ -36,7 +36,7 @@ This is a provider package for ``dbt.cloud`` provider. All classes for this prov
 are in ``airflow.providers.dbt.cloud`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.3/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.4/>`_.
 
 Installation
 ------------
@@ -54,10 +54,11 @@ Requirements
 PIP package                                Version required
 ========================================== ==================
 ``apache-airflow``                         ``>=2.10.0``
-``apache-airflow-providers-common-compat`` ``>=1.6.0``
+``apache-airflow-providers-common-compat`` ``>=1.8.0``
 ``apache-airflow-providers-http``
 ``asgiref``                                ``>=2.3.0``
 ``aiohttp``                                ``>=3.9.2``
+``tenacity``                               ``>=8.3.0``
 ========================================== ==================
 
 Cross provider package dependencies
@@ -81,5 +82,14 @@ Dependent package
 `apache-airflow-providers-openlineage <https://airflow.apache.org/docs/apache-airflow-providers-openlineage>`_ ``openlineage``
 ================================================================================================================== =================
 
+Optional dependencies
+----------------------
+
+=============== ===============================================
+Extra           Dependencies
+=============== ===============================================
+``openlineage`` ``apache-airflow-providers-openlineage>=2.3.0``
+=============== ===============================================
+
 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.3/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.4/changelog.html>`_.
--- apache_airflow_providers_dbt_cloud-4.4.3/docs/changelog.rst
+++ apache_airflow_providers_dbt_cloud-4.4.4rc1/docs/changelog.rst
@@ -28,11 +28,32 @@
 Changelog
 ---------
 
-4.4.3
+4.4.4
 .....
 
+Bug Fixes
+~~~~~~~~~
+
+* ``Add retry mechanism and error handling to DBT Hook (#56651)``
+
+Misc
+~~~~
+
+* ``Migrate dbt.cloud provider to ''common.compat'' (#56999)``
+
+Doc-only
+~~~~~~~~
+
+* ``Remove placeholder Release Date in changelog and index files (#56056)``
+
+.. Below changes are excluded from the changelog. Move them to
+   appropriate section above if needed. Do not delete the lines(!):
+   * ``Enable PT011 rule to prvoider tests (#56320)``
+   * ``nit: Bump required OL client for Openlineage provider (#56302)``
+
+4.4.3
+.....
 
-Release Date: ``|PypiReleaseDate|``
 
 Bug Fixes
 ~~~~~~~~~
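
The headline change above, ``Add retry mechanism and error handling to DBT Hook (#56651)``, surfaces new constructor arguments on ``DbtCloudHook``. A minimal sketch of opting in, based on the new signature shown in the hook diff further down (connection id, account id and tuning values are illustrative)::

    from airflow.providers.dbt.cloud.hooks.dbt import DbtCloudHook

    # Illustrative values; retry_limit must be >= 1 or the hook raises ValueError.
    hook = DbtCloudHook(
        dbt_cloud_conn_id="dbt_cloud_default",
        timeout_seconds=120,  # applied to every HTTP request the hook makes
        retry_limit=3,        # total attempts for retryable failures
        retry_delay=2.0,      # lower bound of the exponential backoff wait
    )
    jobs = hook.list_jobs(account_id=123456)  # retried on 5xx, 429 and connection errors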
--- apache_airflow_providers_dbt_cloud-4.4.3/docs/index.rst
+++ apache_airflow_providers_dbt_cloud-4.4.4rc1/docs/index.rst
@@ -81,7 +81,7 @@ apache-airflow-providers-dbt-cloud package
 `dbt Cloud <https://www.getdbt.com/product/dbt-cloud/>`__
 
 
-Release: 4.4.3
+Release: 4.4.4
 
 Provider package
 ----------------
@@ -105,10 +105,11 @@ The minimum Apache Airflow version supported by this provider distribution is ``
 PIP package                                Version required
 ========================================== ==================
 ``apache-airflow``                         ``>=2.10.0``
-``apache-airflow-providers-common-compat`` ``>=1.6.0``
+``apache-airflow-providers-common-compat`` ``>=1.8.0``
 ``apache-airflow-providers-http``
 ``asgiref``                                ``>=2.3.0``
 ``aiohttp``                                ``>=3.9.2``
+``tenacity``                               ``>=8.3.0``
 ========================================== ==================
 
 Cross provider package dependencies
@@ -138,5 +139,5 @@ Downloading official packages
 You can download officially released packages and verify their checksums and signatures from the
 `Official Apache Download site <https://downloads.apache.org/airflow/providers/>`_
 
-* `The apache-airflow-providers-dbt-cloud 4.4.3 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.3.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.3.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.3.tar.gz.sha512>`__)
-* `The apache-airflow-providers-dbt-cloud 4.4.3 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.3-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.3-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.3-py3-none-any.whl.sha512>`__)
+* `The apache-airflow-providers-dbt-cloud 4.4.4 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.4.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.4.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.4.tar.gz.sha512>`__)
+* `The apache-airflow-providers-dbt-cloud 4.4.4 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.4-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.4-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.4-py3-none-any.whl.sha512>`__)
--- apache_airflow_providers_dbt_cloud-4.4.3/provider.yaml
+++ apache_airflow_providers_dbt_cloud-4.4.4rc1/provider.yaml
@@ -22,12 +22,13 @@ description: |
   `dbt Cloud <https://www.getdbt.com/product/dbt-cloud/>`__
 
 state: ready
-source-date-epoch: 1756876793
+source-date-epoch: 1761116606
 # Note that those versions are maintained by release manager - do not update them manually
 # with the exception of case where other provider in sources has >= new provider version.
 # In such case adding >= NEW_VERSION and bumping to NEW_VERSION in a provider have
 # to be done in the same PR
 versions:
+  - 4.4.4
   - 4.4.3
   - 4.4.2
   - 4.4.1
--- apache_airflow_providers_dbt_cloud-4.4.3/pyproject.toml
+++ apache_airflow_providers_dbt_cloud-4.4.4rc1/pyproject.toml
@@ -25,7 +25,7 @@ build-backend = "flit_core.buildapi"
 
 [project]
 name = "apache-airflow-providers-dbt-cloud"
-version = "4.4.3"
+version = "4.4.4rc1"
 description = "Provider package apache-airflow-providers-dbt-cloud for Apache Airflow"
 readme = "README.rst"
 authors = [
@@ -57,11 +57,12 @@ requires-python = ">=3.10"
 # Make sure to run ``prek update-providers-dependencies --all-files``
 # After you modify the dependencies, and rebuild your Breeze CI image with ``breeze ci-image build``
 dependencies = [
-    "apache-airflow>=2.10.0",
-    "apache-airflow-providers-common-compat>=1.6.0",
+    "apache-airflow>=2.10.0rc1",
+    "apache-airflow-providers-common-compat>=1.8.0rc1",
     "apache-airflow-providers-http",
     "asgiref>=2.3.0",
     "aiohttp>=3.9.2",
+    "tenacity>=8.3.0",
 ]
 
 # The optional dependencies should be modified in place in the generated file
@@ -69,7 +70,7 @@ dependencies = [
 [project.optional-dependencies]
 # pip install apache-airflow-providers-dbt-cloud[openlineage]
 "openlineage" = [
-    "apache-airflow-providers-openlineage>=2.3.0",
+    "apache-airflow-providers-openlineage>=2.3.0rc1",
 ]
 
 [dependency-groups]
@@ -109,8 +110,8 @@ apache-airflow-providers-common-sql = {workspace = true}
 apache-airflow-providers-standard = {workspace = true}
 
 [project.urls]
-"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.3"
-"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.3/changelog.html"
+"Documentation" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.4"
+"Changelog" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.4/changelog.html"
 "Bug Tracker" = "https://github.com/apache/airflow/issues"
 "Source Code" = "https://github.com/apache/airflow"
 "Slack Chat" = "https://s.apache.org/airflow-slack"
--- apache_airflow_providers_dbt_cloud-4.4.3/src/airflow/providers/dbt/cloud/__init__.py
+++ apache_airflow_providers_dbt_cloud-4.4.4rc1/src/airflow/providers/dbt/cloud/__init__.py
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version
 
 __all__ = ["__version__"]
 
-__version__ = "4.4.3"
+__version__ = "4.4.4"
 
 if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
     "2.10.0"
--- apache_airflow_providers_dbt_cloud-4.4.3/src/airflow/providers/dbt/cloud/hooks/dbt.py
+++ apache_airflow_providers_dbt_cloud-4.4.4rc1/src/airflow/providers/dbt/cloud/hooks/dbt.py
@@ -17,6 +17,7 @@
 from __future__ import annotations
 
 import asyncio
+import copy
 import json
 import time
 import warnings
@@ -28,8 +29,10 @@ from typing import TYPE_CHECKING, Any, TypedDict, TypeVar, cast
 
 import aiohttp
 from asgiref.sync import sync_to_async
+from requests import exceptions as requests_exceptions
 from requests.auth import AuthBase
 from requests.sessions import Session
+from tenacity import AsyncRetrying, RetryCallState, retry_if_exception, stop_after_attempt, wait_exponential
 
 from airflow.exceptions import AirflowException
 from airflow.providers.http.hooks.http import HttpHook
@@ -174,6 +177,10 @@ class DbtCloudHook(HttpHook):
     Interact with dbt Cloud using the V2 (V3 if supported) API.
 
     :param dbt_cloud_conn_id: The ID of the :ref:`dbt Cloud connection <howto/connection:dbt-cloud>`.
+    :param timeout_seconds: Optional. The timeout in seconds for HTTP requests. If not provided, no timeout is applied.
+    :param retry_limit: The number of times to retry a request in case of failure.
+    :param retry_delay: The delay in seconds between retries.
+    :param retry_args: A dictionary of arguments to pass to the `tenacity.retry` decorator.
     """
 
     conn_name_attr = "dbt_cloud_conn_id"
@@ -193,9 +200,39 @@ class DbtCloudHook(HttpHook):
         },
     }
 
-    def __init__(self, dbt_cloud_conn_id: str = default_conn_name, *args, **kwargs) -> None:
+    def __init__(
+        self,
+        dbt_cloud_conn_id: str = default_conn_name,
+        timeout_seconds: int | None = None,
+        retry_limit: int = 1,
+        retry_delay: float = 1.0,
+        retry_args: dict[Any, Any] | None = None,
+    ) -> None:
         super().__init__(auth_type=TokenAuth)
         self.dbt_cloud_conn_id = dbt_cloud_conn_id
+        self.timeout_seconds = timeout_seconds
+        if retry_limit < 1:
+            raise ValueError("Retry limit must be greater than or equal to 1")
+        self.retry_limit = retry_limit
+        self.retry_delay = retry_delay
+
+        def retry_after_func(retry_state: RetryCallState) -> None:
+            error_msg = str(retry_state.outcome.exception()) if retry_state.outcome else "Unknown error"
+            self._log_request_error(retry_state.attempt_number, error_msg)
+
+        if retry_args:
+            self.retry_args = copy.copy(retry_args)
+            self.retry_args["retry"] = retry_if_exception(self._retryable_error)
+            self.retry_args["after"] = retry_after_func
+            self.retry_args["reraise"] = True
+        else:
+            self.retry_args = {
+                "stop": stop_after_attempt(self.retry_limit),
+                "wait": wait_exponential(min=self.retry_delay, max=(2**retry_limit)),
+                "retry": retry_if_exception(self._retryable_error),
+                "after": retry_after_func,
+                "reraise": True,
+            }
 
     @staticmethod
     def _get_tenant_domain(conn: Connection) -> str:
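
When a caller supplies ``retry_args``, the constructor copies the dict and then overrides ``retry``, ``after`` and ``reraise``, so only the stop/wait policy is really caller-controlled. A sketch of a custom policy using tenacity primitives (connection id and values are illustrative)::

    from tenacity import stop_after_delay, wait_fixed

    from airflow.providers.dbt.cloud.hooks.dbt import DbtCloudHook

    # Only "stop" and "wait" survive; the hook forces its own retry predicate,
    # logging callback and reraise=True on top of whatever is passed in.
    hook = DbtCloudHook(
        dbt_cloud_conn_id="dbt_cloud_default",  # illustrative connection id
        retry_args={"stop": stop_after_delay(60), "wait": wait_fixed(5)},
    )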
@@ -233,6 +270,36 @@
             headers["Authorization"] = f"Token {self.connection.password}"
         return headers, tenant
 
+    def _log_request_error(self, attempt_num: int, error: str) -> None:
+        self.log.error("Attempt %s API Request to DBT failed with reason: %s", attempt_num, error)
+
+    @staticmethod
+    def _retryable_error(exception: BaseException) -> bool:
+        if isinstance(exception, requests_exceptions.RequestException):
+            if isinstance(exception, (requests_exceptions.ConnectionError, requests_exceptions.Timeout)) or (
+                exception.response is not None
+                and (exception.response.status_code >= 500 or exception.response.status_code == 429)
+            ):
+                return True
+
+        if isinstance(exception, aiohttp.ClientResponseError):
+            if exception.status >= 500 or exception.status == 429:
+                return True
+
+        if isinstance(exception, (aiohttp.ClientConnectorError, TimeoutError)):
+            return True
+
+        return False
+
+    def _a_get_retry_object(self) -> AsyncRetrying:
+        """
+        Instantiate an async retry object.
+
+        :return: instance of AsyncRetrying class
+        """
+        # for compatibility we use reraise to avoid handling request error
+        return AsyncRetrying(**self.retry_args)
+
     @provide_account_id
     async def get_job_details(
         self, run_id: int, account_id: int | None = None, include_related: list[str] | None = None
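
``_retryable_error`` classifies connection failures, timeouts, HTTP 5xx and 429 as transient for both the sync (``requests``) and async (``aiohttp``) paths; anything else fails fast. A quick illustration against the classifier, mirroring the unit tests added below (note this is a private static method, so subject to change)::

    from requests import exceptions as requests_exceptions

    from airflow.providers.dbt.cloud.hooks.dbt import DbtCloudHook

    assert DbtCloudHook._retryable_error(requests_exceptions.ConnectionError()) is True
    assert DbtCloudHook._retryable_error(TimeoutError()) is True
    assert DbtCloudHook._retryable_error(ValueError()) is False  # fails fast, no retry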
@@ -249,17 +316,22 @@
         headers, tenant = await self.get_headers_tenants_from_connection()
         url, params = self.get_request_url_params(tenant, endpoint, include_related)
         proxies = self._get_proxies(self.connection) or {}
+        proxy = proxies.get("https") if proxies and url.startswith("https") else proxies.get("http")
+        extra_request_args = {}
 
-        async with aiohttp.ClientSession(headers=headers) as session:
-            proxy = proxies.get("https") if proxies and url.startswith("https") else proxies.get("http")
-            extra_request_args = {}
+        if proxy:
+            extra_request_args["proxy"] = proxy
 
-            if proxy:
-                extra_request_args["proxy"] = proxy
+        timeout = (
+            aiohttp.ClientTimeout(total=self.timeout_seconds) if self.timeout_seconds is not None else None
+        )
 
-            async with session.get(url, params=params, **extra_request_args) as response:  # type: ignore[arg-type]
-                response.raise_for_status()
-                return await response.json()
+        async with aiohttp.ClientSession(headers=headers, timeout=timeout) as session:
+            async for attempt in self._a_get_retry_object():
+                with attempt:
+                    async with session.get(url, params=params, **extra_request_args) as response:  # type: ignore[arg-type]
+                        response.raise_for_status()
+                        return await response.json()
 
     async def get_job_status(
         self, run_id: int, account_id: int | None = None, include_related: list[str] | None = None
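
Because ``run_with_advanced_retry`` is a sync ``HttpHook`` API, the async path iterates a tenacity ``AsyncRetrying`` object instead. The pattern in isolation, with a stand-in coroutine replacing ``session.get`` (a standalone sketch, not the hook's exact code)::

    import asyncio

    from tenacity import AsyncRetrying, stop_after_attempt, wait_exponential

    attempts = {"n": 0}

    async def flaky_request() -> str:
        # Stand-in for the HTTP call: fails twice, then succeeds.
        attempts["n"] += 1
        if attempts["n"] < 3:
            raise ConnectionError("transient failure")
        return "ok"

    async def fetch() -> str:
        # Each loop iteration is one attempt; an exception raised inside the
        # "with attempt" block is recorded and, per the policy, retried.
        async for attempt in AsyncRetrying(
            stop=stop_after_attempt(3), wait=wait_exponential(min=0.1), reraise=True
        ):
            with attempt:
                return await flaky_request()

    print(asyncio.run(fetch()))  # prints "ok" on the third attempt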
@@ -297,8 +369,14 @@
     def _paginate(
         self, endpoint: str, payload: dict[str, Any] | None = None, proxies: dict[str, str] | None = None
     ) -> list[Response]:
-        extra_options = {"proxies": proxies} if proxies is not None else None
-        response = self.run(endpoint=endpoint, data=payload, extra_options=extra_options)
+        extra_options: dict[str, Any] = {}
+        if self.timeout_seconds is not None:
+            extra_options["timeout"] = self.timeout_seconds
+        if proxies is not None:
+            extra_options["proxies"] = proxies
+        response = self.run_with_advanced_retry(
+            _retry_args=self.retry_args, endpoint=endpoint, data=payload, extra_options=extra_options or None
+        )
         resp_json = response.json()
         limit = resp_json["extra"]["filters"]["limit"]
         num_total_results = resp_json["extra"]["pagination"]["total_count"]
@@ -309,7 +387,12 @@
         _paginate_payload["offset"] = limit
 
         while num_current_results < num_total_results:
-            response = self.run(endpoint=endpoint, data=_paginate_payload, extra_options=extra_options)
+            response = self.run_with_advanced_retry(
+                _retry_args=self.retry_args,
+                endpoint=endpoint,
+                data=_paginate_payload,
+                extra_options=extra_options,
+            )
             resp_json = response.json()
             results.append(response)
             num_current_results += resp_json["extra"]["pagination"]["count"]
@@ -328,7 +411,11 @@
         self.method = method
         full_endpoint = f"api/{api_version}/accounts/{endpoint}" if endpoint else None
         proxies = self._get_proxies(self.connection)
-        extra_options = {"proxies": proxies} if proxies is not None else None
+        extra_options: dict[str, Any] = {}
+        if self.timeout_seconds is not None:
+            extra_options["timeout"] = self.timeout_seconds
+        if proxies is not None:
+            extra_options["proxies"] = proxies
 
         if paginate:
             if isinstance(payload, str):
@@ -339,7 +426,12 @@
 
             raise ValueError("An endpoint is needed to paginate a response.")
 
-        return self.run(endpoint=full_endpoint, data=payload, extra_options=extra_options)
+        return self.run_with_advanced_retry(
+            _retry_args=self.retry_args,
+            endpoint=full_endpoint,
+            data=payload,
+            extra_options=extra_options or None,
+        )
 
     def list_accounts(self) -> list[Response]:
         """
--- apache_airflow_providers_dbt_cloud-4.4.3/src/airflow/providers/dbt/cloud/operators/dbt.py
+++ apache_airflow_providers_dbt_cloud-4.4.4rc1/src/airflow/providers/dbt/cloud/operators/dbt.py
@@ -24,6 +24,7 @@ from pathlib import Path
 from typing import TYPE_CHECKING, Any
 
 from airflow.configuration import conf
+from airflow.providers.common.compat.sdk import BaseOperator, BaseOperatorLink, XCom
 from airflow.providers.dbt.cloud.hooks.dbt import (
     DbtCloudHook,
     DbtCloudJobRunException,
@@ -32,11 +33,6 @@ from airflow.providers.dbt.cloud.hooks.dbt import (
 )
 from airflow.providers.dbt.cloud.triggers.dbt import DbtCloudRunJobTrigger
 from airflow.providers.dbt.cloud.utils.openlineage import generate_openlineage_events_from_dbt_cloud_run
-from airflow.providers.dbt.cloud.version_compat import (
-    BaseOperator,
-    BaseOperatorLink,
-    XCom,
-)
 
 if TYPE_CHECKING:
     from airflow.providers.openlineage.extractors import OperatorLineage
@@ -87,6 +83,7 @@ class DbtCloudRunJobOperator(BaseOperator):
         run. For more information on retry logic, see:
         https://docs.getdbt.com/dbt-cloud/api-v2#/operations/Retry%20Failed%20Job
     :param deferrable: Run operator in the deferrable mode
+    :param hook_params: Extra arguments passed to the DbtCloudHook constructor.
     :return: The ID of the triggered dbt Cloud job run.
     """
 
@@ -124,6 +121,7 @@
         reuse_existing_run: bool = False,
         retry_from_failure: bool = False,
         deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
+        hook_params: dict[str, Any] | None = None,
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
@@ -144,6 +142,7 @@
         self.reuse_existing_run = reuse_existing_run
         self.retry_from_failure = retry_from_failure
         self.deferrable = deferrable
+        self.hook_params = hook_params or {}
 
     def execute(self, context: Context):
         if self.trigger_reason is None:
@@ -273,7 +272,7 @@
     @cached_property
     def hook(self):
         """Returns DBT Cloud hook."""
-        return DbtCloudHook(self.dbt_cloud_conn_id)
+        return DbtCloudHook(self.dbt_cloud_conn_id, **self.hook_params)
 
     def get_openlineage_facets_on_complete(self, task_instance) -> OperatorLineage:
         """
@@ -311,6 +310,7 @@ class DbtCloudGetJobRunArtifactOperator(BaseOperator):
         be returned.
     :param output_file_name: Optional. The desired file name for the download artifact file.
         Defaults to <run_id>_<path> (e.g. "728368_run_results.json").
+    :param hook_params: Extra arguments passed to the DbtCloudHook constructor.
     """
 
     template_fields = ("dbt_cloud_conn_id", "run_id", "path", "account_id", "output_file_name")
@@ -324,6 +324,7 @@
         account_id: int | None = None,
         step: int | None = None,
         output_file_name: str | None = None,
+        hook_params: dict[str, Any] | None = None,
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
@@ -333,9 +334,10 @@
         self.account_id = account_id
         self.step = step
         self.output_file_name = output_file_name or f"{self.run_id}_{self.path}".replace("/", "-")
+        self.hook_params = hook_params or {}
 
     def execute(self, context: Context) -> str:
-        hook = DbtCloudHook(self.dbt_cloud_conn_id)
+        hook = DbtCloudHook(self.dbt_cloud_conn_id, **self.hook_params)
         response = hook.get_job_run_artifact(
             run_id=self.run_id, path=self.path, account_id=self.account_id, step=self.step
         )
@@ -370,6 +372,7 @@ class DbtCloudListJobsOperator(BaseOperator):
     :param order_by: Optional. Field to order the result by. Use '-' to indicate reverse order.
         For example, to use reverse order by the run ID use ``order_by=-id``.
     :param project_id: Optional. The ID of a dbt Cloud project.
+    :param hook_params: Extra arguments passed to the DbtCloudHook constructor.
     """
 
     template_fields = (
@@ -384,6 +387,7 @@
         account_id: int | None = None,
         project_id: int | None = None,
         order_by: str | None = None,
+        hook_params: dict[str, Any] | None = None,
         **kwargs,
     ) -> None:
         super().__init__(**kwargs)
@@ -391,9 +395,10 @@
         self.account_id = account_id
         self.project_id = project_id
         self.order_by = order_by
+        self.hook_params = hook_params or {}
 
     def execute(self, context: Context) -> list:
-        hook = DbtCloudHook(self.dbt_cloud_conn_id)
+        hook = DbtCloudHook(self.dbt_cloud_conn_id, **self.hook_params)
         list_jobs_response = hook.list_jobs(
             account_id=self.account_id, order_by=self.order_by, project_id=self.project_id
         )
--- apache_airflow_providers_dbt_cloud-4.4.3/src/airflow/providers/dbt/cloud/sensors/dbt.py
+++ apache_airflow_providers_dbt_cloud-4.4.4rc1/src/airflow/providers/dbt/cloud/sensors/dbt.py
@@ -22,15 +22,10 @@ from typing import TYPE_CHECKING, Any
 
 from airflow.configuration import conf
 from airflow.exceptions import AirflowException
+from airflow.providers.common.compat.sdk import BaseSensorOperator
 from airflow.providers.dbt.cloud.hooks.dbt import DbtCloudHook, DbtCloudJobRunException, DbtCloudJobRunStatus
 from airflow.providers.dbt.cloud.triggers.dbt import DbtCloudRunJobTrigger
 from airflow.providers.dbt.cloud.utils.openlineage import generate_openlineage_events_from_dbt_cloud_run
-from airflow.providers.dbt.cloud.version_compat import AIRFLOW_V_3_0_PLUS
-
-if AIRFLOW_V_3_0_PLUS:
-    from airflow.sdk import BaseSensorOperator
-else:
-    from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]
 
 if TYPE_CHECKING:
     from airflow.providers.openlineage.extractors import OperatorLineage
--- apache_airflow_providers_dbt_cloud-4.4.3/src/airflow/providers/dbt/cloud/triggers/dbt.py
+++ apache_airflow_providers_dbt_cloud-4.4.4rc1/src/airflow/providers/dbt/cloud/triggers/dbt.py
@@ -36,6 +36,7 @@ class DbtCloudRunJobTrigger(BaseTrigger):
     :param end_time: Time in seconds to wait for a job run to reach a terminal status. Defaults to 7 days.
     :param account_id: The ID of a dbt Cloud account.
     :param poll_interval: polling period in seconds to check for the status.
+    :param hook_params: Extra arguments passed to the DbtCloudHook constructor.
     """
 
     def __init__(
@@ -45,6 +46,7 @@
         end_time: float,
         poll_interval: float,
         account_id: int | None,
+        hook_params: dict[str, Any] | None = None,
     ):
         super().__init__()
         self.run_id = run_id
@@ -52,6 +54,7 @@
         self.conn_id = conn_id
         self.end_time = end_time
         self.poll_interval = poll_interval
+        self.hook_params = hook_params or {}
 
     def serialize(self) -> tuple[str, dict[str, Any]]:
         """Serialize DbtCloudRunJobTrigger arguments and classpath."""
@@ -63,12 +66,13 @@
                 "conn_id": self.conn_id,
                 "end_time": self.end_time,
                 "poll_interval": self.poll_interval,
+                "hook_params": self.hook_params,
             },
         )
 
     async def run(self) -> AsyncIterator[TriggerEvent]:
         """Make async connection to Dbt, polls for the pipeline run status."""
-        hook = DbtCloudHook(self.conn_id)
+        hook = DbtCloudHook(self.conn_id, **self.hook_params)
         try:
             while await self.is_still_running(hook):
                 if self.end_time < time.time():
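
Including ``hook_params`` in ``serialize()`` is what carries the retry settings across the deferral boundary: the triggerer rebuilds the trigger from these kwargs, so anything omitted here would be silently dropped. A round-trip sketch (values illustrative)::

    import time

    from airflow.providers.dbt.cloud.triggers.dbt import DbtCloudRunJobTrigger

    trigger = DbtCloudRunJobTrigger(
        conn_id="dbt_cloud_default",  # illustrative connection id
        run_id=5555,
        end_time=time.time() + 3600,
        poll_interval=10,
        account_id=None,
        hook_params={"retry_limit": 3},
    )
    classpath, kwargs = trigger.serialize()
    assert kwargs["hook_params"] == {"retry_limit": 3}
    rebuilt = DbtCloudRunJobTrigger(**kwargs)  # what the triggerer effectively does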
--- apache_airflow_providers_dbt_cloud-4.4.3/src/airflow/providers/dbt/cloud/version_compat.py
+++ apache_airflow_providers_dbt_cloud-4.4.4rc1/src/airflow/providers/dbt/cloud/version_compat.py
@@ -34,18 +34,6 @@ def get_base_airflow_version_tuple() -> tuple[int, int, int]:
 
 AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
 
-if AIRFLOW_V_3_0_PLUS:
-    from airflow.sdk import BaseOperator, BaseOperatorLink, BaseSensorOperator
-    from airflow.sdk.execution_time.xcom import XCom
-else:
-    from airflow.models import BaseOperator, XCom
-    from airflow.models.baseoperatorlink import BaseOperatorLink  # type: ignore[no-redef]
-    from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]
-
 __all__ = [
     "AIRFLOW_V_3_0_PLUS",
-    "BaseOperator",
-    "BaseSensorOperator",
-    "BaseOperatorLink",
-    "XCom",
 ]
--- apache_airflow_providers_dbt_cloud-4.4.3/tests/unit/dbt/cloud/hooks/test_dbt.py
+++ apache_airflow_providers_dbt_cloud-4.4.4rc1/tests/unit/dbt/cloud/hooks/test_dbt.py
@@ -20,9 +20,11 @@ import json
 from copy import deepcopy
 from datetime import timedelta
 from typing import Any
-from unittest.mock import MagicMock, patch
+from unittest.mock import AsyncMock, MagicMock, patch
 
+import aiohttp
 import pytest
+from requests import exceptions as requests_exceptions
 from requests.models import Response
 
 from airflow.exceptions import AirflowException
@@ -36,7 +38,11 @@ from airflow.providers.dbt.cloud.hooks.dbt import (
     TokenAuth,
     fallback_to_default_account,
 )
-from airflow.utils import timezone
+
+try:
+    from airflow.sdk import timezone
+except ImportError:
+    from airflow.utils import timezone  # type: ignore[attr-defined,no-redef]
 
 ACCOUNT_ID_CONN = "account_id_conn"
 NO_ACCOUNT_ID_CONN = "no_account_id_conn"
@@ -57,6 +63,7 @@ RUN_ID = 5555
 
 BASE_URL = "https://cloud.getdbt.com/"
 SINGLE_TENANT_URL = "https://single.tenant.getdbt.com/"
+NOT_VAILD_DBT_STATUS = "not a valid DbtCloudJobRunStatus"
 
 DEFAULT_LIST_PROJECTS_RESPONSE = {
     "data": [
@@ -90,6 +97,12 @@ def mock_response_json(response: dict):
     return run_response
 
 
+def request_exception_with_status(status_code: int) -> requests_exceptions.HTTPError:
+    response = Response()
+    response.status_code = status_code
+    return requests_exceptions.HTTPError(response=response)
+
+
 class TestDbtCloudJobRunStatus:
     valid_job_run_statuses = [
         1,  # QUEUED
@@ -127,7 +140,7 @@
         ids=_get_ids(invalid_job_run_statuses),
     )
     def test_invalid_job_run_status(self, statuses):
-        with pytest.raises(ValueError):
+        with pytest.raises(ValueError, match=NOT_VAILD_DBT_STATUS):
            DbtCloudJobRunStatus.check_is_valid(statuses)
 
     @pytest.mark.parametrize(
@@ -144,7 +157,7 @@
         ids=_get_ids(invalid_job_run_statuses),
     )
     def test_invalid_terminal_job_run_status(self, statuses):
-        with pytest.raises(ValueError):
+        with pytest.raises(ValueError, match=NOT_VAILD_DBT_STATUS):
            DbtCloudJobRunStatus.check_is_valid(statuses)
 
@@ -1067,3 +1080,235 @@ class TestDbtCloudHook:
 
         assert status is False
         assert msg == "403:Authentication credentials were not provided"
+
+    @pytest.mark.parametrize(
+        argnames="timeout_seconds",
+        argvalues=[60, 180, 300],
+        ids=["60s", "180s", "300s"],
+    )
+    @patch.object(DbtCloudHook, "run_with_advanced_retry")
+    def test_timeout_passed_to_run_and_get_response(self, mock_run_with_retry, timeout_seconds):
+        """Test that timeout is passed to extra_options in _run_and_get_response."""
+        hook = DbtCloudHook(ACCOUNT_ID_CONN, timeout_seconds=timeout_seconds)
+        mock_run_with_retry.return_value = mock_response_json({"data": {"id": JOB_ID}})
+
+        hook.get_job(job_id=JOB_ID, account_id=DEFAULT_ACCOUNT_ID)
+
+        call_args = mock_run_with_retry.call_args
+        assert call_args is not None
+        extra_options = call_args.kwargs.get("extra_options")
+        assert extra_options is not None
+        assert extra_options["timeout"] == timeout_seconds
+
+    @pytest.mark.parametrize(
+        argnames="timeout_seconds",
+        argvalues=[60, 180, 300],
+        ids=["60s", "180s", "300s"],
+    )
+    @patch.object(DbtCloudHook, "run_with_advanced_retry")
+    def test_timeout_passed_to_paginate(self, mock_run_with_retry, timeout_seconds):
+        """Test that timeout is passed to extra_options in _paginate."""
+        hook = DbtCloudHook(ACCOUNT_ID_CONN, timeout_seconds=timeout_seconds)
+        mock_response = mock_response_json(
+            {
+                "data": [{"id": JOB_ID}],
+                "extra": {"filters": {"limit": 100}, "pagination": {"count": 1, "total_count": 1}},
+            }
+        )
+        mock_run_with_retry.return_value = mock_response
+
+        hook.list_jobs(account_id=DEFAULT_ACCOUNT_ID)
+
+        call_args = mock_run_with_retry.call_args
+        assert call_args is not None
+        extra_options = call_args.kwargs.get("extra_options")
+        assert extra_options is not None
+        assert extra_options["timeout"] == timeout_seconds
+
+    @pytest.mark.parametrize(
+        argnames="timeout_seconds",
+        argvalues=[60, 180, 300],
+        ids=["60s", "180s", "300s"],
+    )
+    @patch.object(DbtCloudHook, "run_with_advanced_retry")
+    def test_timeout_with_proxies(self, mock_run_with_retry, timeout_seconds):
+        """Test that both timeout and proxies are passed to extra_options."""
+        hook = DbtCloudHook(PROXY_CONN, timeout_seconds=timeout_seconds)
+        mock_run_with_retry.return_value = mock_response_json({"data": {"id": JOB_ID}})
+
+        hook.get_job(job_id=JOB_ID, account_id=DEFAULT_ACCOUNT_ID)
+
+        call_args = mock_run_with_retry.call_args
+        assert call_args is not None
+        extra_options = call_args.kwargs.get("extra_options")
+        assert extra_options is not None
+        assert extra_options["timeout"] == timeout_seconds
+        assert "proxies" in extra_options
+        assert extra_options["proxies"] == EXTRA_PROXIES["proxies"]
+
+    @pytest.mark.parametrize(
+        argnames="exception, expected",
+        argvalues=[
+            (requests_exceptions.ConnectionError(), True),
+            (requests_exceptions.Timeout(), True),
+            (request_exception_with_status(503), True),
+            (request_exception_with_status(429), True),
+            (request_exception_with_status(404), False),
+            (aiohttp.ClientResponseError(MagicMock(), (), status=500, message=""), True),
+            (aiohttp.ClientResponseError(MagicMock(), (), status=429, message=""), True),
+            (aiohttp.ClientResponseError(MagicMock(), (), status=400, message=""), False),
+            (aiohttp.ClientConnectorError(MagicMock(), OSError()), True),
+            (TimeoutError(), True),
+            (ValueError(), False),
+        ],
+        ids=[
+            "requests_connection_error",
+            "requests_timeout",
+            "requests_status_503",
+            "requests_status_429",
+            "requests_status_404",
+            "aiohttp_status_500",
+            "aiohttp_status_429",
+            "aiohttp_status_400",
+            "aiohttp_connector_error",
+            "timeout_error",
+            "value_error",
+        ],
+    )
+    def test_retryable_error(self, exception, expected):
+        assert DbtCloudHook._retryable_error(exception) is expected
+
+    @pytest.mark.asyncio
+    @pytest.mark.parametrize(
+        "error_factory, retry_qty, retry_delay",
+        [
+            (
+                lambda: aiohttp.ClientResponseError(
+                    request_info=AsyncMock(), history=(), status=500, message=""
+                ),
+                3,
+                0.1,
+            ),
+            (
+                lambda: aiohttp.ClientResponseError(
+                    request_info=AsyncMock(), history=(), status=429, message=""
+                ),
+                5,
+                0.1,
+            ),
+            (lambda: aiohttp.ClientConnectorError(AsyncMock(), OSError("boom")), 2, 0.1),
+            (lambda: TimeoutError(), 2, 0.1),
+        ],
+        ids=["aiohttp_500", "aiohttp_429", "connector_error", "timeout"],
+    )
+    @patch("airflow.providers.dbt.cloud.hooks.dbt.aiohttp.ClientSession.get")
+    async def test_get_job_details_retry_with_retryable_errors(
+        self, get_mock, error_factory, retry_qty, retry_delay
+    ):
+        hook = DbtCloudHook(ACCOUNT_ID_CONN, retry_limit=retry_qty, retry_delay=retry_delay)
+
+        def fail_cm():
+            cm = AsyncMock()
+            cm.__aenter__.side_effect = error_factory()
+            return cm
+
+        ok_resp = AsyncMock()
+        ok_resp.raise_for_status = MagicMock(return_value=None)
+        ok_resp.json = AsyncMock(return_value={"data": "Success"})
+        ok_cm = AsyncMock()
+        ok_cm.__aenter__.return_value = ok_resp
+        ok_cm.__aexit__.return_value = AsyncMock()
+
+        all_resp = [fail_cm() for _ in range(retry_qty - 1)]
+        all_resp.append(ok_cm)
+        get_mock.side_effect = all_resp
+
+        result = await hook.get_job_details(run_id=RUN_ID, account_id=None)
+
+        assert result == {"data": "Success"}
+        assert get_mock.call_count == retry_qty
+
+    @pytest.mark.asyncio
+    @pytest.mark.parametrize(
+        "error_factory, expected_exception",
+        [
+            (
+                lambda: aiohttp.ClientResponseError(
+                    request_info=AsyncMock(), history=(), status=404, message="Not Found"
+                ),
+                aiohttp.ClientResponseError,
+            ),
+            (
+                lambda: aiohttp.ClientResponseError(
+                    request_info=AsyncMock(), history=(), status=400, message="Bad Request"
+                ),
+                aiohttp.ClientResponseError,
+            ),
+            (lambda: ValueError("Invalid parameter"), ValueError),
+        ],
+        ids=["aiohttp_404", "aiohttp_400", "value_error"],
+    )
+    @patch("airflow.providers.dbt.cloud.hooks.dbt.aiohttp.ClientSession.get")
+    async def test_get_job_details_retry_with_non_retryable_errors(
+        self, get_mock, error_factory, expected_exception
+    ):
+        hook = DbtCloudHook(ACCOUNT_ID_CONN, retry_limit=3, retry_delay=0.1)
+
+        def fail_cm():
+            cm = AsyncMock()
+            cm.__aenter__.side_effect = error_factory()
+            return cm
+
+        get_mock.return_value = fail_cm()
+
+        with pytest.raises(expected_exception):
+            await hook.get_job_details(run_id=RUN_ID, account_id=None)
+
+        assert get_mock.call_count == 1
+
+    @pytest.mark.asyncio
+    @pytest.mark.parametrize(
+        argnames="error_factory, expected_exception",
+        argvalues=[
+            (
+                lambda: aiohttp.ClientResponseError(
+                    request_info=AsyncMock(), history=(), status=503, message="Service Unavailable"
+                ),
+                aiohttp.ClientResponseError,
+            ),
+            (
+                lambda: aiohttp.ClientResponseError(
+                    request_info=AsyncMock(), history=(), status=500, message="Internal Server Error"
+                ),
+                aiohttp.ClientResponseError,
+            ),
+            (
+                lambda: aiohttp.ClientConnectorError(AsyncMock(), OSError("Connection refused")),
+                aiohttp.ClientConnectorError,
+            ),
+            (lambda: TimeoutError("Request timeout"), TimeoutError),
+        ],
+        ids=[
+            "aiohttp_503_exhausted",
+            "aiohttp_500_exhausted",
+            "connector_error_exhausted",
+            "timeout_exhausted",
+        ],
+    )
+    @patch("airflow.providers.dbt.cloud.hooks.dbt.aiohttp.ClientSession.get")
+    async def test_get_job_details_retry_with_exhausted_retries(
+        self, get_mock, error_factory, expected_exception
+    ):
+        hook = DbtCloudHook(ACCOUNT_ID_CONN, retry_limit=2, retry_delay=0.1)
+
+        def fail_cm():
+            cm = AsyncMock()
+            cm.__aenter__.side_effect = error_factory()
+            return cm
+
+        get_mock.side_effect = [fail_cm() for _ in range(2)]
+
+        with pytest.raises(expected_exception):
+            await hook.get_job_details(run_id=RUN_ID, account_id=None)
+
+        assert get_mock.call_count == 2
--- apache_airflow_providers_dbt_cloud-4.4.3/tests/unit/dbt/cloud/operators/test_dbt.py
+++ apache_airflow_providers_dbt_cloud-4.4.4rc1/tests/unit/dbt/cloud/operators/test_dbt.py
@@ -24,6 +24,7 @@ import pytest
 
 from airflow.exceptions import TaskDeferred
 from airflow.models import DAG, Connection
+from airflow.providers.common.compat.sdk import timezone
 from airflow.providers.dbt.cloud.hooks.dbt import DbtCloudHook, DbtCloudJobRunException, DbtCloudJobRunStatus
 from airflow.providers.dbt.cloud.operators.dbt import (
     DbtCloudGetJobRunArtifactOperator,
@@ -31,7 +32,6 @@ from airflow.providers.dbt.cloud.operators.dbt import (
     DbtCloudRunJobOperator,
 )
 from airflow.providers.dbt.cloud.triggers.dbt import DbtCloudRunJobTrigger
-from airflow.utils import timezone
 
 from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS
 
--- apache_airflow_providers_dbt_cloud-4.4.3/tests/unit/dbt/cloud/triggers/test_dbt.py
+++ apache_airflow_providers_dbt_cloud-4.4.4rc1/tests/unit/dbt/cloud/triggers/test_dbt.py
@@ -45,6 +45,7 @@ class TestDbtCloudRunJobTrigger:
             end_time=self.END_TIME,
             run_id=self.RUN_ID,
             account_id=self.ACCOUNT_ID,
+            hook_params={"retry_delay": 10},
         )
         classpath, kwargs = trigger.serialize()
         assert classpath == "airflow.providers.dbt.cloud.triggers.dbt.DbtCloudRunJobTrigger"
@@ -54,6 +55,7 @@
             "conn_id": self.CONN_ID,
             "end_time": self.END_TIME,
             "poll_interval": self.POLL_INTERVAL,
+            "hook_params": {"retry_delay": 10},
         }
 
     @pytest.mark.asyncio
--- apache_airflow_providers_dbt_cloud-4.4.3/tests/unit/dbt/cloud/utils/test_openlineage.py
+++ apache_airflow_providers_dbt_cloud-4.4.4rc1/tests/unit/dbt/cloud/utils/test_openlineage.py
@@ -157,6 +157,7 @@ class TestGenerateOpenLineageEventsFromDbtCloudRun:
             "data": {
                 "connection": {
                     "type": "snowflake",
+                    "name": "conn_name",
                     "details": {
                         "account": "gp21411.us-east-1",
                         "database": "SANDBOX",