apache-airflow-providers-dbt-cloud 4.4.0rc1__tar.gz → 4.4.1rc1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of apache-airflow-providers-dbt-cloud might be problematic.

Files changed (54)
  1. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/PKG-INFO +8 -9
  2. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/README.rst +4 -4
  3. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/docs/changelog.rst +26 -0
  4. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/docs/index.rst +3 -3
  5. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/provider.yaml +2 -1
  6. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/pyproject.toml +4 -5
  7. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/src/airflow/providers/dbt/cloud/__init__.py +1 -1
  8. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/src/airflow/providers/dbt/cloud/hooks/dbt.py +3 -3
  9. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/src/airflow/providers/dbt/cloud/operators/dbt.py +5 -9
  10. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/src/airflow/providers/dbt/cloud/sensors/dbt.py +6 -1
  11. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/src/airflow/providers/dbt/cloud/version_compat.py +16 -0
  12. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/tests/unit/dbt/cloud/hooks/test_dbt.py +11 -11
  13. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/tests/unit/dbt/cloud/operators/test_dbt.py +11 -9
  14. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/tests/unit/dbt/cloud/sensors/test_dbt.py +9 -9
  15. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/tests/unit/dbt/cloud/test_data/run_results.json +2 -1
  16. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/docs/.latest-doc-only-change.txt +0 -0
  17. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/docs/commits.rst +0 -0
  18. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/docs/conf.py +0 -0
  19. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/docs/connections.rst +0 -0
  20. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/docs/installing-providers-from-sources.rst +0 -0
  21. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/docs/integration-logos/dbt.png +0 -0
  22. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/docs/operators.rst +0 -0
  23. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/docs/security.rst +0 -0
  24. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/src/airflow/__init__.py +0 -0
  25. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/src/airflow/providers/__init__.py +0 -0
  26. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/src/airflow/providers/dbt/__init__.py +0 -0
  27. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/src/airflow/providers/dbt/cloud/LICENSE +0 -0
  28. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/src/airflow/providers/dbt/cloud/get_provider_info.py +0 -0
  29. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/src/airflow/providers/dbt/cloud/hooks/__init__.py +0 -0
  30. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/src/airflow/providers/dbt/cloud/operators/__init__.py +0 -0
  31. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/src/airflow/providers/dbt/cloud/sensors/__init__.py +0 -0
  32. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/src/airflow/providers/dbt/cloud/triggers/__init__.py +0 -0
  33. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/src/airflow/providers/dbt/cloud/triggers/dbt.py +0 -0
  34. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/src/airflow/providers/dbt/cloud/utils/__init__.py +0 -0
  35. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/src/airflow/providers/dbt/cloud/utils/openlineage.py +0 -0
  36. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/tests/conftest.py +0 -0
  37. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/tests/system/__init__.py +0 -0
  38. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/tests/system/dbt/__init__.py +0 -0
  39. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/tests/system/dbt/cloud/__init__.py +0 -0
  40. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/tests/system/dbt/cloud/example_dbt_cloud.py +0 -0
  41. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/tests/unit/__init__.py +0 -0
  42. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/tests/unit/dbt/__init__.py +0 -0
  43. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/tests/unit/dbt/cloud/__init__.py +0 -0
  44. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/tests/unit/dbt/cloud/hooks/__init__.py +0 -0
  45. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/tests/unit/dbt/cloud/operators/__init__.py +0 -0
  46. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/tests/unit/dbt/cloud/sensors/__init__.py +0 -0
  47. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/tests/unit/dbt/cloud/test_data/__init__.py +0 -0
  48. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/tests/unit/dbt/cloud/test_data/catalog.json +0 -0
  49. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/tests/unit/dbt/cloud/test_data/job_run.json +0 -0
  50. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/tests/unit/dbt/cloud/test_data/manifest.json +0 -0
  51. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/tests/unit/dbt/cloud/triggers/__init__.py +0 -0
  52. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/tests/unit/dbt/cloud/triggers/test_dbt.py +0 -0
  53. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/tests/unit/dbt/cloud/utils/__init__.py +0 -0
  54. {apache_airflow_providers_dbt_cloud-4.4.0rc1 → apache_airflow_providers_dbt_cloud-4.4.1rc1}/tests/unit/dbt/cloud/utils/test_openlineage.py +0 -0

PKG-INFO

@@ -1,11 +1,11 @@
  Metadata-Version: 2.4
  Name: apache-airflow-providers-dbt-cloud
- Version: 4.4.0rc1
+ Version: 4.4.1rc1
  Summary: Provider package apache-airflow-providers-dbt-cloud for Apache Airflow
  Keywords: airflow-provider,dbt.cloud,airflow,integration
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
  Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
- Requires-Python: ~=3.9
+ Requires-Python: ~=3.10
  Description-Content-Type: text/x-rst
  Classifier: Development Status :: 5 - Production/Stable
  Classifier: Environment :: Console
@@ -15,7 +15,6 @@ Classifier: Intended Audience :: System Administrators
  Classifier: Framework :: Apache Airflow
  Classifier: Framework :: Apache Airflow :: Provider
  Classifier: License :: OSI Approved :: Apache Software License
- Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
@@ -27,8 +26,8 @@ Requires-Dist: asgiref>=2.3.0
  Requires-Dist: aiohttp>=3.9.2
  Requires-Dist: apache-airflow-providers-openlineage>=2.3.0rc1 ; extra == "openlineage"
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
- Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.0/changelog.html
- Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.0
+ Project-URL: Changelog, https://airflow.staged.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.1/changelog.html
+ Project-URL: Documentation, https://airflow.staged.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.1
  Project-URL: Mastodon, https://fosstodon.org/@airflow
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
  Project-URL: Source Code, https://github.com/apache/airflow
@@ -60,7 +59,7 @@ Provides-Extra: openlineage

  Package ``apache-airflow-providers-dbt-cloud``

- Release: ``4.4.0``
+ Release: ``4.4.1``


  `dbt Cloud <https://www.getdbt.com/product/dbt-cloud/>`__
@@ -73,7 +72,7 @@ This is a provider package for ``dbt.cloud`` provider. All classes for this prov
  are in ``airflow.providers.dbt.cloud`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.0/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.1/>`_.

  Installation
  ------------
@@ -82,7 +81,7 @@ You can install this package on top of an existing Airflow 2 installation (see `
  for the minimum Airflow version supported) via
  ``pip install apache-airflow-providers-dbt-cloud``

- The package supports the following python versions: 3.9,3.10,3.11,3.12
+ The package supports the following python versions: 3.10,3.11,3.12

  Requirements
  ------------
@@ -119,5 +118,5 @@ Dependent package
  ================================================================================================================== =================

  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.0/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.1/changelog.html>`_.

README.rst

@@ -23,7 +23,7 @@

  Package ``apache-airflow-providers-dbt-cloud``

- Release: ``4.4.0``
+ Release: ``4.4.1``


  `dbt Cloud <https://www.getdbt.com/product/dbt-cloud/>`__
@@ -36,7 +36,7 @@ This is a provider package for ``dbt.cloud`` provider. All classes for this prov
  are in ``airflow.providers.dbt.cloud`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.0/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.1/>`_.

  Installation
  ------------
@@ -45,7 +45,7 @@ You can install this package on top of an existing Airflow 2 installation (see `
  for the minimum Airflow version supported) via
  ``pip install apache-airflow-providers-dbt-cloud``

- The package supports the following python versions: 3.9,3.10,3.11,3.12
+ The package supports the following python versions: 3.10,3.11,3.12

  Requirements
  ------------
@@ -82,4 +82,4 @@ Dependent package
  ================================================================================================================== =================

  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.0/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.1/changelog.html>`_.

docs/changelog.rst

@@ -28,6 +28,32 @@
  Changelog
  ---------

+ 4.4.1
+ .....
+
+ Bug Fixes
+ ~~~~~~~~~
+
+ * ``Converting int account IDs to str in DBT Cloud connections (#51957)``
+
+ Misc
+ ~~~~
+
+ * ``Move 'BaseHook' implementation to task SDK (#51873)``
+ * ``Disable UP038 ruff rule and revert mandatory 'X | Y' in insintance checks (#52644)``
+ * ``Replace 'models.BaseOperator' to Task SDK one for DBT & Databricks (#52377)``
+ * ``Drop support for Python 3.9 (#52072)``
+ * ``Use BaseSensorOperator from task sdk in providers (#52296)``
+ * ``Add deprecation to 'airflow/sensors/base.py' (#52249)``
+ * ``Adding 'invocation_id' to run-results as expected by Openlineage (#51916)``
+
+ .. Below changes are excluded from the changelog. Move them to
+    appropriate section above if needed. Do not delete the lines(!):
+    * ``Make sure all test version imports come from test_common (#52425)``
+    * ``removed usage of pytest.mark.db_test from dbt tests (#52031)``
+    * ``Introducing fixture to create 'Connections' without DB in provider tests (#51930)``
+    * ``Switch the Supervisor/task process from line-based to length-prefixed (#51699)``
+
  4.4.0
  .....

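
The headline fix above, ``Converting int account IDs to str in DBT Cloud connections (#51957)``, concerns the account ID that the hook reads from the connection's ``login`` field. As a minimal sketch (not code from this package; the connection id and credentials are placeholders), a connection defined in code would keep the account ID as a string:

    from airflow.models.connection import Connection
    from airflow.providers.dbt.cloud.hooks.dbt import DbtCloudHook

    # Placeholder values for illustration only.
    dbt_conn = Connection(
        conn_id="dbt_cloud_default",        # assumed default connection id
        conn_type=DbtCloudHook.conn_type,
        login=str(123456),                  # dbt Cloud account ID, kept as a string
        password="my-dbt-cloud-api-token",  # dbt Cloud API token
    )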

docs/index.rst

@@ -81,7 +81,7 @@ apache-airflow-providers-dbt-cloud package
  `dbt Cloud <https://www.getdbt.com/product/dbt-cloud/>`__


- Release: 4.4.0
+ Release: 4.4.1

  Provider package
  ----------------
@@ -138,5 +138,5 @@ Downloading official packages
  You can download officially released packages and verify their checksums and signatures from the
  `Official Apache Download site <https://downloads.apache.org/airflow/providers/>`_

- * `The apache-airflow-providers-dbt-cloud 4.4.0 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.0.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.0.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.0.tar.gz.sha512>`__)
- * `The apache-airflow-providers-dbt-cloud 4.4.0 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.0-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.0-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.0-py3-none-any.whl.sha512>`__)
+ * `The apache-airflow-providers-dbt-cloud 4.4.1 sdist package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.1.tar.gz>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.1.tar.gz.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.1.tar.gz.sha512>`__)
+ * `The apache-airflow-providers-dbt-cloud 4.4.1 wheel package <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.1-py3-none-any.whl>`_ (`asc <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.1-py3-none-any.whl.asc>`__, `sha512 <https://downloads.apache.org/airflow/providers/apache_airflow_providers_dbt_cloud-4.4.1-py3-none-any.whl.sha512>`__)

provider.yaml

@@ -22,12 +22,13 @@ description: |
    `dbt Cloud <https://www.getdbt.com/product/dbt-cloud/>`__

  state: ready
- source-date-epoch: 1747132505
+ source-date-epoch: 1751473108
  # Note that those versions are maintained by release manager - do not update them manually
  # with the exception of case where other provider in sources has >= new provider version.
  # In such case adding >= NEW_VERSION and bumping to NEW_VERSION in a provider have
  # to be done in the same PR
  versions:
+   - 4.4.1
    - 4.4.0
    - 4.3.3
    - 4.3.2

pyproject.toml

@@ -25,7 +25,7 @@ build-backend = "flit_core.buildapi"

  [project]
  name = "apache-airflow-providers-dbt-cloud"
- version = "4.4.0rc1"
+ version = "4.4.1rc1"
  description = "Provider package apache-airflow-providers-dbt-cloud for Apache Airflow"
  readme = "README.rst"
  authors = [
@@ -44,13 +44,12 @@ classifiers = [
      "Framework :: Apache Airflow",
      "Framework :: Apache Airflow :: Provider",
      "License :: OSI Approved :: Apache Software License",
-     "Programming Language :: Python :: 3.9",
      "Programming Language :: Python :: 3.10",
      "Programming Language :: Python :: 3.11",
      "Programming Language :: Python :: 3.12",
      "Topic :: System :: Monitoring",
  ]
- requires-python = "~=3.9"
+ requires-python = "~=3.10"

  # The dependencies should be modified in place in the generated file.
  # Any change in the dependencies is preserved when the file is regenerated
@@ -109,8 +108,8 @@ apache-airflow-providers-common-sql = {workspace = true}
  apache-airflow-providers-standard = {workspace = true}

  [project.urls]
- "Documentation" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.0"
- "Changelog" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.0/changelog.html"
+ "Documentation" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.1"
+ "Changelog" = "https://airflow.staged.apache.org/docs/apache-airflow-providers-dbt-cloud/4.4.1/changelog.html"
  "Bug Tracker" = "https://github.com/apache/airflow/issues"
  "Source Code" = "https://github.com/apache/airflow"
  "Slack Chat" = "https://s.apache.org/airflow-slack"

src/airflow/providers/dbt/cloud/__init__.py

@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version

  __all__ = ["__version__"]

- __version__ = "4.4.0"
+ __version__ = "4.4.1"

  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
      "2.10.0"

src/airflow/providers/dbt/cloud/hooks/dbt.py

@@ -20,11 +20,11 @@ import asyncio
  import json
  import time
  import warnings
- from collections.abc import Sequence
+ from collections.abc import Callable, Sequence
  from enum import Enum
  from functools import cached_property, wraps
  from inspect import signature
- from typing import TYPE_CHECKING, Any, Callable, TypedDict, TypeVar, cast
+ from typing import TYPE_CHECKING, Any, TypedDict, TypeVar, cast

  import aiohttp
  from asgiref.sync import sync_to_async
@@ -283,7 +283,7 @@ class DbtCloudHook(HttpHook):
          if not _connection.password:
              raise AirflowException("An API token is required to connect to dbt Cloud.")

-         return _connection
+         return _connection  # type: ignore[return-value]

      def get_conn(self, *args, **kwargs) -> Session:
          tenant = self._get_tenant_domain(self.connection)
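
Apart from the ``type: ignore`` comment, the hook change is an import shuffle: ``Callable`` now comes from ``collections.abc`` instead of ``typing``, the preferred spelling once Python 3.9 support is dropped. A standalone sketch of that typing style (illustrative only, not the hook's own code):

    from collections.abc import Callable, Sequence

    # Hypothetical helper showing Callable/Sequence sourced from collections.abc.
    def apply_all(value: int, funcs: Sequence[Callable[[int], int]]) -> int:
        for func in funcs:
            value = func(value)
        return value

    print(apply_all(3, [lambda x: x + 1, lambda x: x * 2]))  # prints 8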

src/airflow/providers/dbt/cloud/operators/dbt.py

@@ -24,8 +24,6 @@ from pathlib import Path
  from typing import TYPE_CHECKING, Any

  from airflow.configuration import conf
- from airflow.models import BaseOperator
- from airflow.providers.common.compat.version_compat import AIRFLOW_V_3_0_PLUS
  from airflow.providers.dbt.cloud.hooks.dbt import (
      DbtCloudHook,
      DbtCloudJobRunException,
@@ -34,18 +32,16 @@ from airflow.providers.dbt.cloud.hooks.dbt import (
  )
  from airflow.providers.dbt.cloud.triggers.dbt import DbtCloudRunJobTrigger
  from airflow.providers.dbt.cloud.utils.openlineage import generate_openlineage_events_from_dbt_cloud_run
+ from airflow.providers.dbt.cloud.version_compat import (
+     BaseOperator,
+     BaseOperatorLink,
+     XCom,
+ )

  if TYPE_CHECKING:
      from airflow.providers.openlineage.extractors import OperatorLineage
      from airflow.utils.context import Context

- if AIRFLOW_V_3_0_PLUS:
-     from airflow.sdk import BaseOperatorLink
-     from airflow.sdk.execution_time.xcom import XCom
- else:
-     from airflow.models import XCom  # type: ignore[no-redef]
-     from airflow.models.baseoperatorlink import BaseOperatorLink  # type: ignore[no-redef]
-

  class DbtCloudRunJobOperatorLink(BaseOperatorLink):
      """Allows users to monitor the triggered job run directly in dbt Cloud."""

src/airflow/providers/dbt/cloud/sensors/dbt.py

@@ -25,7 +25,12 @@ from airflow.exceptions import AirflowException
  from airflow.providers.dbt.cloud.hooks.dbt import DbtCloudHook, DbtCloudJobRunException, DbtCloudJobRunStatus
  from airflow.providers.dbt.cloud.triggers.dbt import DbtCloudRunJobTrigger
  from airflow.providers.dbt.cloud.utils.openlineage import generate_openlineage_events_from_dbt_cloud_run
- from airflow.sensors.base import BaseSensorOperator
+ from airflow.providers.dbt.cloud.version_compat import AIRFLOW_V_3_0_PLUS
+
+ if AIRFLOW_V_3_0_PLUS:
+     from airflow.sdk import BaseSensorOperator
+ else:
+     from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]

  if TYPE_CHECKING:
      from airflow.providers.openlineage.extractors import OperatorLineage
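
The operator and sensor changes above only move where their base classes come from; the user-facing API is unchanged. A minimal usage sketch under assumed values (the job id, run id, and connection id are placeholders; the sensor arguments mirror the unit tests further down, and in a real DAG the run id would normally come from the operator's XCom rather than a literal):

    from datetime import datetime

    from airflow import DAG
    from airflow.providers.dbt.cloud.operators.dbt import DbtCloudRunJobOperator
    from airflow.providers.dbt.cloud.sensors.dbt import DbtCloudJobRunSensor

    with DAG(dag_id="example_dbt_cloud", start_date=datetime(2025, 1, 1), schedule=None):
        trigger_job = DbtCloudRunJobOperator(
            task_id="trigger_dbt_cloud_job",
            dbt_cloud_conn_id="dbt_cloud_default",  # assumed connection id
            job_id=48617,                           # placeholder dbt Cloud job id
        )

        wait_for_run = DbtCloudJobRunSensor(
            task_id="job_run_sensor",
            dbt_cloud_conn_id="dbt_cloud_default",
            run_id=5555,        # placeholder run id
            timeout=300,
            poke_interval=15,
        )

        trigger_job >> wait_for_run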

src/airflow/providers/dbt/cloud/version_compat.py

@@ -33,3 +33,19 @@ def get_base_airflow_version_tuple() -> tuple[int, int, int]:


  AIRFLOW_V_3_0_PLUS = get_base_airflow_version_tuple() >= (3, 0, 0)
+
+ if AIRFLOW_V_3_0_PLUS:
+     from airflow.sdk import BaseOperator, BaseOperatorLink, BaseSensorOperator
+     from airflow.sdk.execution_time.xcom import XCom
+ else:
+     from airflow.models import BaseOperator, XCom
+     from airflow.models.baseoperatorlink import BaseOperatorLink  # type: ignore[no-redef]
+     from airflow.sensors.base import BaseSensorOperator  # type: ignore[no-redef]
+
+ __all__ = [
+     "AIRFLOW_V_3_0_PLUS",
+     "BaseOperator",
+     "BaseSensorOperator",
+     "BaseOperatorLink",
+     "XCom",
+ ]
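
This shim lets the rest of the provider import Airflow-version-dependent base classes from one place instead of branching on ``AIRFLOW_V_3_0_PLUS`` at every call site, which is what the operators module above now does. A sketch of the consumption pattern (the operator class here is hypothetical, for illustration only):

    from airflow.providers.dbt.cloud.version_compat import AIRFLOW_V_3_0_PLUS, BaseOperator


    class ExampleShimmedOperator(BaseOperator):
        # Hypothetical operator: BaseOperator resolves to the Task SDK class on
        # Airflow 3.x and to airflow.models.BaseOperator on Airflow 2.x.
        def execute(self, context):
            self.log.info("Running on Airflow 3+: %s", AIRFLOW_V_3_0_PLUS)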

tests/unit/dbt/cloud/hooks/test_dbt.py

@@ -36,9 +36,7 @@ from airflow.providers.dbt.cloud.hooks.dbt import (
      TokenAuth,
      fallback_to_default_account,
  )
- from airflow.utils import db, timezone
-
- pytestmark = pytest.mark.db_test
+ from airflow.utils import timezone

  ACCOUNT_ID_CONN = "account_id_conn"
  NO_ACCOUNT_ID_CONN = "no_account_id_conn"
@@ -151,12 +149,14 @@


  class TestDbtCloudHook:
-     def setup_class(self):
+     # TODO: Potential performance issue, converted setup_class to a setup_connections function level fixture
+     @pytest.fixture(autouse=True)
+     def setup_connections(self, create_connection_without_db):
          # Connection with ``account_id`` specified
          account_id_conn = Connection(
              conn_id=ACCOUNT_ID_CONN,
              conn_type=DbtCloudHook.conn_type,
-             login=DEFAULT_ACCOUNT_ID,
+             login=str(DEFAULT_ACCOUNT_ID),
              password=TOKEN,
          )

@@ -171,7 +171,7 @@
          host_conn = Connection(
              conn_id=SINGLE_TENANT_CONN,
              conn_type=DbtCloudHook.conn_type,
-             login=DEFAULT_ACCOUNT_ID,
+             login=str(DEFAULT_ACCOUNT_ID),
              password=TOKEN,
              host=SINGLE_TENANT_DOMAIN,
          )
@@ -180,16 +180,16 @@
          proxy_conn = Connection(
              conn_id=PROXY_CONN,
              conn_type=DbtCloudHook.conn_type,
-             login=DEFAULT_ACCOUNT_ID,
+             login=str(DEFAULT_ACCOUNT_ID),
              password=TOKEN,
              host=SINGLE_TENANT_DOMAIN,
              extra=EXTRA_PROXIES,
          )

-         db.merge_conn(account_id_conn)
-         db.merge_conn(no_account_id_conn)
-         db.merge_conn(host_conn)
-         db.merge_conn(proxy_conn)
+         create_connection_without_db(account_id_conn)
+         create_connection_without_db(no_account_id_conn)
+         create_connection_without_db(host_conn)
+         create_connection_without_db(proxy_conn)

      @pytest.mark.parametrize(
          argnames="conn_id, url",
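
Across the test modules, ``db.merge_conn`` calls (which write to the metadata database) are replaced by the ``create_connection_without_db`` fixture introduced for provider tests, so the module-wide ``pytest.mark.db_test`` markers can be dropped. A condensed sketch of the pattern, assuming Airflow's ``tests_common`` fixtures are available (the test class and connection values here are made up for illustration):

    import pytest

    from airflow.models.connection import Connection
    from airflow.providers.dbt.cloud.hooks.dbt import DbtCloudHook


    class TestSomeDbtCloudFeature:
        @pytest.fixture(autouse=True)
        def setup_connections(self, create_connection_without_db):
            # Registers the connection for the test without touching the metadata DB.
            create_connection_without_db(
                Connection(
                    conn_id="dbt_test_conn",
                    conn_type=DbtCloudHook.conn_type,
                    login=str(123456),  # account ID as a string, matching the 4.4.1 fix
                    password="fake-token",
                )
            )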

tests/unit/dbt/cloud/operators/test_dbt.py

@@ -31,14 +31,13 @@ from airflow.providers.dbt.cloud.operators.dbt import (
      DbtCloudRunJobOperator,
  )
  from airflow.providers.dbt.cloud.triggers.dbt import DbtCloudRunJobTrigger
- from airflow.providers.dbt.cloud.version_compat import AIRFLOW_V_3_0_PLUS
- from airflow.utils import db, timezone
+ from airflow.utils import timezone
+
+ from tests_common.test_utils.version_compat import AIRFLOW_V_3_0_PLUS

  if AIRFLOW_V_3_0_PLUS:
      from airflow.sdk.execution_time.comms import XComResult

- pytestmark = pytest.mark.db_test
-
  DEFAULT_DATE = timezone.datetime(2021, 1, 1)
  TASK_ID = "run_job_op"
  ACCOUNT_ID_CONN = "account_id_conn"
@@ -97,12 +96,14 @@ def mock_response_json(response: dict):
      return run_response


- def setup_module():
+ # TODO: Potential performance issue, converted setup_module to a setup_connections function level fixture
+ @pytest.fixture(autouse=True)
+ def setup_connections(create_connection_without_db):
      # Connection with ``account_id`` specified
      conn_account_id = Connection(
          conn_id=ACCOUNT_ID_CONN,
          conn_type=DbtCloudHook.conn_type,
-         login=DEFAULT_ACCOUNT_ID,
+         login=str(DEFAULT_ACCOUNT_ID),
          password=TOKEN,
      )

@@ -113,8 +114,8 @@ def setup_module():
          password=TOKEN,
      )

-     db.merge_conn(conn_account_id)
-     db.merge_conn(conn_no_account_id)
+     create_connection_without_db(conn_account_id)
+     create_connection_without_db(conn_no_account_id)


  class TestDbtCloudRunJobOperator:
@@ -645,6 +646,7 @@
          [(ACCOUNT_ID_CONN, None), (NO_ACCOUNT_ID_CONN, ACCOUNT_ID)],
          ids=["default_account", "explicit_account"],
      )
+     @pytest.mark.db_test
      def test_run_job_operator_link(
          self, conn_id, account_id, create_task_instance_of_operator, request, mock_supervisor_comms
      ):
@@ -665,7 +667,7 @@
          ti.xcom_push(key="job_run_url", value=_run_response["data"]["href"])

          if AIRFLOW_V_3_0_PLUS and mock_supervisor_comms:
-             mock_supervisor_comms.get_message.return_value = XComResult(
+             mock_supervisor_comms.send.return_value = XComResult(
                  key="job_run_url",
                  value=EXPECTED_JOB_RUN_OP_EXTRA_LINK.format(
                      account_id=account_id or DEFAULT_ACCOUNT_ID,

tests/unit/dbt/cloud/sensors/test_dbt.py

@@ -29,10 +29,6 @@ from airflow.models.connection import Connection
  from airflow.providers.dbt.cloud.hooks.dbt import DbtCloudHook, DbtCloudJobRunException, DbtCloudJobRunStatus
  from airflow.providers.dbt.cloud.sensors.dbt import DbtCloudJobRunSensor
  from airflow.providers.dbt.cloud.triggers.dbt import DbtCloudRunJobTrigger
- from airflow.utils import db
-
- pytestmark = pytest.mark.db_test
-

  ACCOUNT_ID = 11111
  RUN_ID = 5555
@@ -45,6 +41,15 @@ class TestDbtCloudJobRunSensor:
      DBT_RUN_ID = 1234
      TIMEOUT = 300

+     # TODO: Potential performance issue, converted setup_class to a setup_connections function level fixture
+     @pytest.fixture(autouse=True)
+     def setup_connections(self, create_connection_without_db):
+         # Connection
+         conn = Connection(
+             conn_id="dbt", conn_type=DbtCloudHook.conn_type, login=str(ACCOUNT_ID), password=TOKEN
+         )
+         create_connection_without_db(conn)
+
      def setup_class(self):
          self.sensor = DbtCloudJobRunSensor(
              task_id="job_run_sensor",
@@ -55,11 +60,6 @@
              poke_interval=15,
          )

-         # Connection
-         conn = Connection(conn_id="dbt", conn_type=DbtCloudHook.conn_type, login=ACCOUNT_ID, password=TOKEN)
-
-         db.merge_conn(conn)
-
      def test_init(self):
          assert self.sensor.dbt_cloud_conn_id == "dbt"
          assert self.sensor.run_id == RUN_ID

tests/unit/dbt/cloud/test_data/run_results.json

@@ -1,7 +1,8 @@
  {
      "metadata": {
          "dbt_schema_version": "https://schemas.getdbt.com/dbt/run-results/v4.json",
-         "dbt_version": "1.6.1"
+         "dbt_version": "1.6.1",
+         "invocation_id": "188471607"
      },
      "results": [
          {
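
This fixture change matches the ``Adding 'invocation_id' to run-results as expected by Openlineage (#51916)`` entry: the OpenLineage integration expects an ``invocation_id`` key under ``metadata`` in ``run_results.json``. A minimal sketch of reading that key from such an artifact (illustrative only):

    import json

    with open("run_results.json") as run_results_file:
        run_results = json.load(run_results_file)

    # "188471607" for the test fixture above; None if the key is absent.
    invocation_id = run_results["metadata"].get("invocation_id")
    print(invocation_id)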