apache-airflow-providers-dbt-cloud 3.4.0rc1__tar.gz → 3.4.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of apache-airflow-providers-dbt-cloud might be problematic.

Files changed (28)
  1. {apache-airflow-providers-dbt-cloud-3.4.0rc1/apache_airflow_providers_dbt_cloud.egg-info → apache_airflow_providers_dbt_cloud-3.4.1}/PKG-INFO +29 -22
  2. {apache-airflow-providers-dbt-cloud-3.4.0rc1 → apache_airflow_providers_dbt_cloud-3.4.1}/README.rst +9 -4
  3. {apache-airflow-providers-dbt-cloud-3.4.0rc1 → apache_airflow_providers_dbt_cloud-3.4.1/airflow/providers/dbt/cloud}/LICENSE +52 -0
  4. {apache-airflow-providers-dbt-cloud-3.4.0rc1 → apache_airflow_providers_dbt_cloud-3.4.1}/airflow/providers/dbt/cloud/__init__.py +4 -5
  5. {apache-airflow-providers-dbt-cloud-3.4.0rc1 → apache_airflow_providers_dbt_cloud-3.4.1}/airflow/providers/dbt/cloud/get_provider_info.py +4 -2
  6. {apache-airflow-providers-dbt-cloud-3.4.0rc1 → apache_airflow_providers_dbt_cloud-3.4.1}/airflow/providers/dbt/cloud/hooks/dbt.py +20 -20
  7. {apache-airflow-providers-dbt-cloud-3.4.0rc1 → apache_airflow_providers_dbt_cloud-3.4.1}/airflow/providers/dbt/cloud/operators/dbt.py +7 -10
  8. {apache-airflow-providers-dbt-cloud-3.4.0rc1 → apache_airflow_providers_dbt_cloud-3.4.1}/airflow/providers/dbt/cloud/sensors/dbt.py +5 -3
  9. {apache-airflow-providers-dbt-cloud-3.4.0rc1 → apache_airflow_providers_dbt_cloud-3.4.1}/airflow/providers/dbt/cloud/triggers/dbt.py +1 -1
  10. {apache-airflow-providers-dbt-cloud-3.4.0rc1 → apache_airflow_providers_dbt_cloud-3.4.1}/airflow/providers/dbt/cloud/utils/openlineage.py +2 -2
  11. apache_airflow_providers_dbt_cloud-3.4.1/pyproject.toml +84 -0
  12. apache-airflow-providers-dbt-cloud-3.4.0rc1/MANIFEST.in +0 -31
  13. apache-airflow-providers-dbt-cloud-3.4.0rc1/NOTICE +0 -6
  14. apache-airflow-providers-dbt-cloud-3.4.0rc1/PKG-INFO +0 -133
  15. apache-airflow-providers-dbt-cloud-3.4.0rc1/apache_airflow_providers_dbt_cloud.egg-info/SOURCES.txt +0 -26
  16. apache-airflow-providers-dbt-cloud-3.4.0rc1/apache_airflow_providers_dbt_cloud.egg-info/dependency_links.txt +0 -1
  17. apache-airflow-providers-dbt-cloud-3.4.0rc1/apache_airflow_providers_dbt_cloud.egg-info/entry_points.txt +0 -2
  18. apache-airflow-providers-dbt-cloud-3.4.0rc1/apache_airflow_providers_dbt_cloud.egg-info/not-zip-safe +0 -1
  19. apache-airflow-providers-dbt-cloud-3.4.0rc1/apache_airflow_providers_dbt_cloud.egg-info/requires.txt +0 -10
  20. apache-airflow-providers-dbt-cloud-3.4.0rc1/apache_airflow_providers_dbt_cloud.egg-info/top_level.txt +0 -1
  21. apache-airflow-providers-dbt-cloud-3.4.0rc1/pyproject.toml +0 -179
  22. apache-airflow-providers-dbt-cloud-3.4.0rc1/setup.cfg +0 -65
  23. apache-airflow-providers-dbt-cloud-3.4.0rc1/setup.py +0 -52
  24. {apache-airflow-providers-dbt-cloud-3.4.0rc1 → apache_airflow_providers_dbt_cloud-3.4.1}/airflow/providers/dbt/cloud/hooks/__init__.py +0 -0
  25. {apache-airflow-providers-dbt-cloud-3.4.0rc1 → apache_airflow_providers_dbt_cloud-3.4.1}/airflow/providers/dbt/cloud/operators/__init__.py +0 -0
  26. {apache-airflow-providers-dbt-cloud-3.4.0rc1 → apache_airflow_providers_dbt_cloud-3.4.1}/airflow/providers/dbt/cloud/sensors/__init__.py +0 -0
  27. {apache-airflow-providers-dbt-cloud-3.4.0rc1 → apache_airflow_providers_dbt_cloud-3.4.1}/airflow/providers/dbt/cloud/triggers/__init__.py +0 -0
  28. {apache-airflow-providers-dbt-cloud-3.4.0rc1 → apache_airflow_providers_dbt_cloud-3.4.1}/airflow/providers/dbt/cloud/utils/__init__.py +0 -0
{apache-airflow-providers-dbt-cloud-3.4.0rc1/apache_airflow_providers_dbt_cloud.egg-info → apache_airflow_providers_dbt_cloud-3.4.1}/PKG-INFO

@@ -1,19 +1,12 @@
  Metadata-Version: 2.1
  Name: apache-airflow-providers-dbt-cloud
- Version: 3.4.0rc1
- Summary: Provider for Apache Airflow. Implements apache-airflow-providers-dbt-cloud package
- Home-page: https://airflow.apache.org/
- Download-URL: https://archive.apache.org/dist/airflow/providers
- Author: Apache Software Foundation
- Author-email: dev@airflow.apache.org
- License: Apache License 2.0
- Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.4.0/
- Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.4.0/changelog.html
- Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
- Project-URL: Source Code, https://github.com/apache/airflow
- Project-URL: Slack Chat, https://s.apache.org/airflow-slack
- Project-URL: Twitter, https://twitter.com/ApacheAirflow
- Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
+ Version: 3.4.1
+ Summary: Provider package apache-airflow-providers-dbt-cloud for Apache Airflow
+ Keywords: airflow-provider,dbt.cloud,airflow,integration
+ Author-email: Apache Software Foundation <dev@airflow.apache.org>
+ Maintainer-email: Apache Software Foundation <dev@airflow.apache.org>
+ Requires-Python: ~=3.8
+ Description-Content-Type: text/x-rst
  Classifier: Development Status :: 5 - Production/Stable
  Classifier: Environment :: Console
  Classifier: Environment :: Web Environment
@@ -27,12 +20,21 @@ Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Topic :: System :: Monitoring
- Requires-Python: ~=3.8
- Description-Content-Type: text/x-rst
+ Requires-Dist: aiohttp
+ Requires-Dist: apache-airflow-providers-http
+ Requires-Dist: apache-airflow>=2.5.0
+ Requires-Dist: asgiref
+ Requires-Dist: apache-airflow-providers-http ; extra == "http"
+ Requires-Dist: apache-airflow-providers-openlineage ; extra == "openlineage"
+ Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
+ Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.4.1/changelog.html
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.4.1
+ Project-URL: Slack Chat, https://s.apache.org/airflow-slack
+ Project-URL: Source Code, https://github.com/apache/airflow
+ Project-URL: Twitter, https://twitter.com/ApacheAirflow
+ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
  Provides-Extra: http
  Provides-Extra: openlineage
- License-File: LICENSE
- License-File: NOTICE


  .. Licensed to the Apache Software Foundation (ASF) under one
@@ -69,10 +71,16 @@ License-File: NOTICE
  specific language governing permissions and limitations
  under the License.

+ .. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
+ OVERWRITTEN WHEN PREPARING PACKAGES.
+
+ .. IF YOU WANT TO MODIFY TEMPLATE FOR THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
+ `PROVIDER_README_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
+

  Package ``apache-airflow-providers-dbt-cloud``

- Release: ``3.4.0rc1``
+ Release: ``3.4.1``


  `dbt Cloud <https://www.getdbt.com/product/what-is-dbt/>`__
@@ -85,8 +93,7 @@ This is a provider package for ``dbt.cloud`` provider. All classes for this prov
  are in ``airflow.providers.dbt.cloud`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.4.0/>`_.
-
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.4.1/>`_.

  Installation
  ------------
@@ -130,4 +137,4 @@ Dependent package
  ============================================================================================================== ===============

  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.4.0/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.4.1/changelog.html>`_.
{apache-airflow-providers-dbt-cloud-3.4.0rc1 → apache_airflow_providers_dbt_cloud-3.4.1}/README.rst

@@ -33,10 +33,16 @@
  specific language governing permissions and limitations
  under the License.

+ .. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
+ OVERWRITTEN WHEN PREPARING PACKAGES.
+
+ .. IF YOU WANT TO MODIFY TEMPLATE FOR THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
+ `PROVIDER_README_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
+

  Package ``apache-airflow-providers-dbt-cloud``

- Release: ``3.4.0rc1``
+ Release: ``3.4.1``


  `dbt Cloud <https://www.getdbt.com/product/what-is-dbt/>`__
@@ -49,8 +55,7 @@ This is a provider package for ``dbt.cloud`` provider. All classes for this prov
  are in ``airflow.providers.dbt.cloud`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.4.0/>`_.
-
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.4.1/>`_.

  Installation
  ------------
@@ -94,4 +99,4 @@ Dependent package
  ============================================================================================================== ===============

  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.4.0/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.4.1/changelog.html>`_.
{apache-airflow-providers-dbt-cloud-3.4.0rc1 → apache_airflow_providers_dbt_cloud-3.4.1/airflow/providers/dbt/cloud}/LICENSE

@@ -199,3 +199,55 @@ distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
+
+ ============================================================================
+ APACHE AIRFLOW SUBCOMPONENTS:
+
+ The Apache Airflow project contains subcomponents with separate copyright
+ notices and license terms. Your use of the source code for the these
+ subcomponents is subject to the terms and conditions of the following
+ licenses.
+
+
+ ========================================================================
+ Third party Apache 2.0 licenses
+ ========================================================================
+
+ The following components are provided under the Apache 2.0 License.
+ See project link for details. The text of each license is also included
+ at licenses/LICENSE-[project].txt.
+
+ (ALv2 License) hue v4.3.0 (https://github.com/cloudera/hue/)
+ (ALv2 License) jqclock v2.3.0 (https://github.com/JohnRDOrazio/jQuery-Clock-Plugin)
+ (ALv2 License) bootstrap3-typeahead v4.0.2 (https://github.com/bassjobsen/Bootstrap-3-Typeahead)
+ (ALv2 License) connexion v2.7.0 (https://github.com/zalando/connexion)
+
+ ========================================================================
+ MIT licenses
+ ========================================================================
+
+ The following components are provided under the MIT License. See project link for details.
+ The text of each license is also included at licenses/LICENSE-[project].txt.
+
+ (MIT License) jquery v3.5.1 (https://jquery.org/license/)
+ (MIT License) dagre-d3 v0.6.4 (https://github.com/cpettitt/dagre-d3)
+ (MIT License) bootstrap v3.4.1 (https://github.com/twbs/bootstrap/)
+ (MIT License) d3-tip v0.9.1 (https://github.com/Caged/d3-tip)
+ (MIT License) dataTables v1.10.25 (https://datatables.net)
+ (MIT License) normalize.css v3.0.2 (http://necolas.github.io/normalize.css/)
+ (MIT License) ElasticMock v1.3.2 (https://github.com/vrcmarcos/elasticmock)
+ (MIT License) MomentJS v2.24.0 (http://momentjs.com/)
+ (MIT License) eonasdan-bootstrap-datetimepicker v4.17.49 (https://github.com/eonasdan/bootstrap-datetimepicker/)
+
+ ========================================================================
+ BSD 3-Clause licenses
+ ========================================================================
+ The following components are provided under the BSD 3-Clause license. See project links for details.
+ The text of each license is also included at licenses/LICENSE-[project].txt.
+
+ (BSD 3 License) d3 v5.16.0 (https://d3js.org)
+ (BSD 3 License) d3-shape v2.1.0 (https://github.com/d3/d3-shape)
+ (BSD 3 License) cgroupspy 0.2.1 (https://github.com/cloudsigma/cgroupspy)
+
+ ========================================================================
+ See licenses/LICENSES-ui.txt for packages used in `/airflow/www`
{apache-airflow-providers-dbt-cloud-3.4.0rc1 → apache_airflow_providers_dbt_cloud-3.4.1}/airflow/providers/dbt/cloud/__init__.py

@@ -1,4 +1,3 @@
- #
  # Licensed to the Apache Software Foundation (ASF) under one
  # or more contributor license agreements. See the NOTICE file
  # distributed with this work for additional information
@@ -19,8 +18,8 @@
  # NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
  # OVERWRITTEN WHEN PREPARING DOCUMENTATION FOR THE PACKAGES.
  #
- # IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
- # `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
+ # IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
+ # `PROVIDER__INIT__PY_TEMPLATE.py.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
  #
  from __future__ import annotations

@@ -28,7 +27,7 @@ import packaging.version

  __all__ = ["__version__"]

- __version__ = "3.4.0"
+ __version__ = "3.4.1"

  try:
      from airflow import __version__ as airflow_version
@@ -39,5 +38,5 @@ if packaging.version.parse(packaging.version.parse(airflow_version).base_version
      "2.5.0"
  ):
      raise RuntimeError(
-         f"The package `apache-airflow-providers-dbt-cloud:{__version__}` requires Apache Airflow 2.5.0+"  # NOQA: E501
+         f"The package `apache-airflow-providers-dbt-cloud:{__version__}` needs Apache Airflow 2.5.0+"
      )
{apache-airflow-providers-dbt-cloud-3.4.0rc1 → apache_airflow_providers_dbt_cloud-3.4.1}/airflow/providers/dbt/cloud/get_provider_info.py

@@ -18,8 +18,8 @@
  # NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
  # OVERWRITTEN WHEN PREPARING PACKAGES.
  #
- # IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
- # `get_provider_info_TEMPLATE.py.jinja2` IN the `provider_packages` DIRECTORY
+ # IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
+ # `get_provider_info_TEMPLATE.py.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY


  def get_provider_info():
@@ -28,7 +28,9 @@ def get_provider_info():
      "name": "dbt Cloud",
      "description": "`dbt Cloud <https://www.getdbt.com/product/what-is-dbt/>`__\n",
      "suspended": False,
+     "source-date-epoch": 1700827456,
      "versions": [
+         "3.4.1",
          "3.4.0",
          "3.3.0",
          "3.2.3",
{apache-airflow-providers-dbt-cloud-3.4.0rc1 → apache_airflow_providers_dbt_cloud-3.4.1}/airflow/providers/dbt/cloud/hooks/dbt.py

@@ -42,7 +42,7 @@ if TYPE_CHECKING:

  def fallback_to_default_account(func: Callable) -> Callable:
      """
-     Decorator which provides a fallback value for ``account_id``.
+     Provide a fallback value for ``account_id``.

      If the ``account_id`` is None or not passed to the decorated function,
      the value will be taken from the configured dbt Cloud Airflow Connection.
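
The docstring edit above (like the similar ones below) only switches to imperative mood; behaviour is unchanged. As a rough sketch of the fallback pattern being documented, not the provider's actual implementation (which inspects the wrapped function's signature), assuming the default account ID lives in the connection's login field:

.. code-block:: python

    import functools


    def fallback_to_default_account(func):
        """Provide a fallback value for ``account_id``."""

        @functools.wraps(func)
        def wrapper(self, *args, **kwargs):
            if kwargs.get("account_id") is None:
                # Hypothetical fallback: read the default account ID from the
                # dbt Cloud connection configured on the hook.
                kwargs["account_id"] = int(self.get_connection(self.dbt_cloud_conn_id).login)
            return func(self, *args, **kwargs)

        return wrapper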
@@ -113,7 +113,7 @@ class DbtCloudJobRunStatus(Enum):

      @classmethod
      def check_is_valid(cls, statuses: int | Sequence[int] | set[int]):
-         """Validates input statuses are a known value."""
+         """Validate input statuses are a known value."""
          if isinstance(statuses, (Sequence, Set)):
              for status in statuses:
                  cls(status)
@@ -122,7 +122,7 @@ class DbtCloudJobRunStatus(Enum):

      @classmethod
      def is_terminal(cls, status: int) -> bool:
-         """Checks if the input status is that of a terminal type."""
+         """Check if the input status is that of a terminal type."""
          cls.check_is_valid(statuses=status)

          return status in cls.TERMINAL_STATUSES.value
@@ -137,7 +137,7 @@ T = TypeVar("T", bound=Any)

  def provide_account_id(func: T) -> T:
      """
-     Decorator which provides a fallback value for ``account_id``.
+     Provide a fallback value for ``account_id``.

      If the ``account_id`` is None or not passed to the decorated function,
      the value will be taken from the configured dbt Cloud Airflow Connection.
@@ -176,7 +176,7 @@ class DbtCloudHook(HttpHook):

      @staticmethod
      def get_ui_field_behaviour() -> dict[str, Any]:
-         """Builds custom field behavior for the dbt Cloud connection form in the Airflow UI."""
+         """Build custom field behavior for the dbt Cloud connection form in the Airflow UI."""
          return {
              "hidden_fields": ["schema", "port", "extra"],
              "relabeling": {"login": "Account ID", "password": "API Token", "host": "Tenant"},
@@ -224,7 +224,7 @@ class DbtCloudHook(HttpHook):
          self, run_id: int, account_id: int | None = None, include_related: list[str] | None = None
      ) -> Any:
          """
-         Uses Http async call to retrieve metadata for a specific run of a dbt Cloud job.
+         Use Http async call to retrieve metadata for a specific run of a dbt Cloud job.

          :param run_id: The ID of a dbt Cloud job run.
          :param account_id: Optional. The ID of a dbt Cloud account.
@@ -247,7 +247,7 @@ class DbtCloudHook(HttpHook):
          self, run_id: int, account_id: int | None = None, include_related: list[str] | None = None
      ) -> int:
          """
-         Retrieves the status for a specific run of a dbt Cloud job.
+         Retrieve the status for a specific run of a dbt Cloud job.

          :param run_id: The ID of a dbt Cloud job run.
          :param account_id: Optional. The ID of a dbt Cloud account.
@@ -317,7 +317,7 @@ class DbtCloudHook(HttpHook):

      def list_accounts(self) -> list[Response]:
          """
-         Retrieves all of the dbt Cloud accounts the configured API token is authorized to access.
+         Retrieve all of the dbt Cloud accounts the configured API token is authorized to access.

          :return: List of request responses.
          """
@@ -326,7 +326,7 @@ class DbtCloudHook(HttpHook):

      @fallback_to_default_account
      def get_account(self, account_id: int | None = None) -> Response:
          """
-         Retrieves metadata for a specific dbt Cloud account.
+         Retrieve metadata for a specific dbt Cloud account.

          :param account_id: Optional. The ID of a dbt Cloud account.
          :return: The request response.
@@ -336,7 +336,7 @@ class DbtCloudHook(HttpHook):

      @fallback_to_default_account
      def list_projects(self, account_id: int | None = None) -> list[Response]:
          """
-         Retrieves metadata for all projects tied to a specified dbt Cloud account.
+         Retrieve metadata for all projects tied to a specified dbt Cloud account.

          :param account_id: Optional. The ID of a dbt Cloud account.
          :return: List of request responses.
@@ -346,7 +346,7 @@ class DbtCloudHook(HttpHook):

      @fallback_to_default_account
      def get_project(self, project_id: int, account_id: int | None = None) -> Response:
          """
-         Retrieves metadata for a specific project.
+         Retrieve metadata for a specific project.

          :param project_id: The ID of a dbt Cloud project.
          :param account_id: Optional. The ID of a dbt Cloud account.
@@ -362,7 +362,7 @@ class DbtCloudHook(HttpHook):
          project_id: int | None = None,
      ) -> list[Response]:
          """
-         Retrieves metadata for all jobs tied to a specified dbt Cloud account.
+         Retrieve metadata for all jobs tied to a specified dbt Cloud account.

          If a ``project_id`` is supplied, only jobs pertaining to this project will be retrieved.

@@ -381,7 +381,7 @@ class DbtCloudHook(HttpHook):

      @fallback_to_default_account
      def get_job(self, job_id: int, account_id: int | None = None) -> Response:
          """
-         Retrieves metadata for a specific job.
+         Retrieve metadata for a specific job.

          :param job_id: The ID of a dbt Cloud job.
          :param account_id: Optional. The ID of a dbt Cloud account.
@@ -438,7 +438,7 @@ class DbtCloudHook(HttpHook):
          order_by: str | None = None,
      ) -> list[Response]:
          """
-         Retrieves metadata for all dbt Cloud job runs for an account.
+         Retrieve metadata for all dbt Cloud job runs for an account.

          If a ``job_definition_id`` is supplied, only metadata for runs of that specific job are pulled.

@@ -465,7 +465,7 @@ class DbtCloudHook(HttpHook):
          self, run_id: int, account_id: int | None = None, include_related: list[str] | None = None
      ) -> Response:
          """
-         Retrieves metadata for a specific run of a dbt Cloud job.
+         Retrieve metadata for a specific run of a dbt Cloud job.

          :param run_id: The ID of a dbt Cloud job run.
          :param account_id: Optional. The ID of a dbt Cloud account.
@@ -480,7 +480,7 @@ class DbtCloudHook(HttpHook):

      def get_job_run_status(self, run_id: int, account_id: int | None = None) -> int:
          """
-         Retrieves the status for a specific run of a dbt Cloud job.
+         Retrieve the status for a specific run of a dbt Cloud job.

          :param run_id: The ID of a dbt Cloud job run.
          :param account_id: Optional. The ID of a dbt Cloud account.
@@ -504,7 +504,7 @@ class DbtCloudHook(HttpHook):
          timeout: int = 60 * 60 * 24 * 7,
      ) -> bool:
          """
-         Waits for a dbt Cloud job run to match an expected status.
+         Wait for a dbt Cloud job run to match an expected status.

          :param run_id: The ID of a dbt Cloud job run.
          :param account_id: Optional. The ID of a dbt Cloud account.
@@ -555,7 +555,7 @@ class DbtCloudHook(HttpHook):
          self, run_id: int, account_id: int | None = None, step: int | None = None
      ) -> list[Response]:
          """
-         Retrieves a list of the available artifact files generated for a completed run of a dbt Cloud job.
+         Retrieve a list of the available artifact files generated for a completed run of a dbt Cloud job.

          By default, this returns artifacts from the last step in the run. To
          list artifacts from other steps in the run, use the ``step`` parameter.
@@ -576,7 +576,7 @@ class DbtCloudHook(HttpHook):
          self, run_id: int, path: str, account_id: int | None = None, step: int | None = None
      ) -> Response:
          """
-         Retrieves a list of the available artifact files generated for a completed run of a dbt Cloud job.
+         Retrieve a list of the available artifact files generated for a completed run of a dbt Cloud job.

          By default, this returns artifacts from the last step in the run. To
          list artifacts from other steps in the run, use the ``step`` parameter.
@@ -604,7 +604,7 @@ class DbtCloudHook(HttpHook):
          step: int | None = None,
      ):
          """
-         Retrieves a list of chosen artifact files generated for a step in completed run of a dbt Cloud job.
+         Retrieve a list of chosen artifact files generated for a step in completed run of a dbt Cloud job.

          By default, this returns artifacts from the last step in the run.
          This takes advantage of the asynchronous calls to speed up the retrieval.
{apache-airflow-providers-dbt-cloud-3.4.0rc1 → apache_airflow_providers_dbt_cloud-3.4.1}/airflow/providers/dbt/cloud/operators/dbt.py

@@ -24,7 +24,6 @@ from pathlib import Path
  from typing import TYPE_CHECKING, Any

  from airflow.configuration import conf
- from airflow.exceptions import AirflowException
  from airflow.models import BaseOperator, BaseOperatorLink, XCom
  from airflow.providers.dbt.cloud.hooks.dbt import (
      DbtCloudHook,
@@ -189,15 +188,13 @@ class DbtCloudRunJobOperator(BaseOperator):
          return self.run_id

      def execute_complete(self, context: Context, event: dict[str, Any]) -> int:
-         """
-         Callback for when the trigger fires - returns immediately.
-
-         Relies on trigger to throw an exception, otherwise it assumes execution was successful.
-         """
-         if event["status"] == "error":
-             raise AirflowException(event["message"])
-         self.log.info(event["message"])
+         """Execute when the trigger fires - returns immediately."""
          self.run_id = event["run_id"]
+         if event["status"] == "cancelled":
+             raise DbtCloudJobRunException(f"Job run {self.run_id} has been cancelled.")
+         elif event["status"] == "error":
+             raise DbtCloudJobRunException(f"Job run {self.run_id} has failed.")
+         self.log.info(event["message"])
          return int(event["run_id"])

      def on_kill(self) -> None:
@@ -220,7 +217,7 @@ class DbtCloudRunJobOperator(BaseOperator):

      def get_openlineage_facets_on_complete(self, task_instance) -> OperatorLineage:
          """
-         Implementing _on_complete because job_run needs to be triggered first in execute method.
+         Implement _on_complete because job_run needs to be triggered first in execute method.

          This should send additional events only if operator `wait_for_termination` is set to True.
          """
{apache-airflow-providers-dbt-cloud-3.4.0rc1 → apache_airflow_providers_dbt_cloud-3.4.1}/airflow/providers/dbt/cloud/sensors/dbt.py

@@ -130,7 +130,8 @@ class DbtCloudJobRunSensor(BaseSensorOperator):
          )

      def execute_complete(self, context: Context, event: dict[str, Any]) -> int:
-         """Callback for when the trigger fires - returns immediately.
+         """
+         Execute when the trigger fires - returns immediately.

          This relies on trigger to throw an exception, otherwise it assumes
          execution was successful.
@@ -144,12 +145,13 @@ class DbtCloudJobRunSensor(BaseSensorOperator):
          return int(event["run_id"])

      def get_openlineage_facets_on_complete(self, task_instance) -> OperatorLineage:
-         """Implementing _on_complete because job_run needs to be triggered first in execute method."""
+         """Implement _on_complete because job_run needs to be triggered first in execute method."""
          return generate_openlineage_events_from_dbt_cloud_run(operator=self, task_instance=task_instance)


  class DbtCloudJobRunAsyncSensor(DbtCloudJobRunSensor):
-     """This class is deprecated.
+     """
+     This class is deprecated.

      Please use :class:`airflow.providers.dbt.cloud.sensor.dbt.DbtCloudJobRunSensor`
      with ``deferrable=True``.
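
Per the deprecation note above, the async sensor is now just the plain sensor run in deferrable mode; a minimal sketch with a placeholder run ID:

.. code-block:: python

    from airflow.providers.dbt.cloud.sensors.dbt import DbtCloudJobRunSensor

    wait_for_run = DbtCloudJobRunSensor(
        task_id="wait_for_dbt_job_run",
        run_id=5678,  # placeholder dbt Cloud job run ID
        deferrable=True,  # defer to the trigger instead of poking
    )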
{apache-airflow-providers-dbt-cloud-3.4.0rc1 → apache_airflow_providers_dbt_cloud-3.4.1}/airflow/providers/dbt/cloud/triggers/dbt.py

@@ -52,7 +52,7 @@ class DbtCloudRunJobTrigger(BaseTrigger):
          self.poll_interval = poll_interval

      def serialize(self) -> tuple[str, dict[str, Any]]:
-         """Serializes DbtCloudRunJobTrigger arguments and classpath."""
+         """Serialize DbtCloudRunJobTrigger arguments and classpath."""
          return (
              "airflow.providers.dbt.cloud.triggers.dbt.DbtCloudRunJobTrigger",
              {
{apache-airflow-providers-dbt-cloud-3.4.0rc1 → apache_airflow_providers_dbt_cloud-3.4.1}/airflow/providers/dbt/cloud/utils/openlineage.py

@@ -32,7 +32,7 @@ def generate_openlineage_events_from_dbt_cloud_run(
      operator: DbtCloudRunJobOperator | DbtCloudJobRunSensor, task_instance: TaskInstance
  ) -> OperatorLineage:
      """
-     Common method generating OpenLineage events from the DBT Cloud run.
+     Generate OpenLineage events from the DBT Cloud run.

      This function retrieves information about a DBT Cloud run, including the associated job,
      project, and execution details. It processes the run's artifacts, such as the manifest and run results,
@@ -83,7 +83,7 @@ def generate_openlineage_events_from_dbt_cloud_run(
      catalog = operator.hook.get_job_run_artifact(operator.run_id, path="catalog.json").json()["data"]

      async def get_artifacts_for_steps(steps, artifacts):
-         """Gets artifacts for a list of steps concurrently."""
+         """Get artifacts for a list of steps concurrently."""
          tasks = [
              operator.hook.get_job_run_artifacts_concurrently(
                  run_id=operator.run_id,
apache_airflow_providers_dbt_cloud-3.4.1/pyproject.toml

@@ -0,0 +1,84 @@
+
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+
+ # NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
+ # OVERWRITTEN WHEN PREPARING PACKAGES.
+
+ # IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
+ # `pyproject_TEMPLATE.toml.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
+ #
+ [build-system]
+ requires = ["flit_core >=3.2,<4"]
+ build-backend = "flit_core.buildapi"
+
+ [project]
+ name = "apache-airflow-providers-dbt-cloud"
+ version = "3.4.1"
+ description = "Provider package apache-airflow-providers-dbt-cloud for Apache Airflow"
+ readme = "README.rst"
+ authors = [
+     {name="Apache Software Foundation", email="dev@airflow.apache.org"},
+ ]
+ maintainers = [
+     {name="Apache Software Foundation", email="dev@airflow.apache.org"},
+ ]
+ keywords = [ "airflow-provider", "dbt.cloud", "airflow", "integration" ]
+ classifiers = [
+     "Development Status :: 5 - Production/Stable",
+     "Environment :: Console",
+     "Environment :: Web Environment",
+     "Intended Audience :: Developers",
+     "Intended Audience :: System Administrators",
+     "Framework :: Apache Airflow",
+     "Framework :: Apache Airflow :: Provider",
+     "License :: OSI Approved :: Apache Software License",
+     "Programming Language :: Python :: 3.8",
+     "Programming Language :: Python :: 3.9",
+     "Programming Language :: Python :: 3.10",
+     "Programming Language :: Python :: 3.11",
+     "Topic :: System :: Monitoring",
+ ]
+ requires-python = "~=3.8"
+ dependencies = [
+     "aiohttp",
+     "apache-airflow-providers-http",
+     "apache-airflow>=2.5.0",
+     "asgiref",
+ ]
+
+ [project.urls]
+ "Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.4.1"
+ "Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.4.1/changelog.html"
+ "Bug Tracker" = "https://github.com/apache/airflow/issues"
+ "Source Code" = "https://github.com/apache/airflow"
+ "Slack Chat" = "https://s.apache.org/airflow-slack"
+ "Twitter" = "https://twitter.com/ApacheAirflow"
+ "YouTube" = "https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/"
+
+ [project.entry-points."apache_airflow_provider"]
+ provider_info = "airflow.providers.dbt.cloud.get_provider_info:get_provider_info"
+ [project.optional-dependencies]
+ "http" = [
+     "apache-airflow-providers-http",
+ ]
+ "openlineage" = [
+     "apache-airflow-providers-openlineage",
+ ]
+
+ [tool.flit.module]
+ name = "airflow.providers.dbt.cloud"
apache-airflow-providers-dbt-cloud-3.4.0rc1/MANIFEST.in

@@ -1,31 +0,0 @@
- #
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements. See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership. The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License. You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing,
- # software distributed under the License is distributed on an
- # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- # KIND, either express or implied. See the License for the
- # specific language governing permissions and limitations
- # under the License.
-
- # NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
- # OVERWRITTEN WHEN PREPARING PACKAGES.
-
- # IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
- # `MANIFEST_TEMPLATE.py.jinja2` IN the `provider_packages` DIRECTORY
-
-
-
-
- include NOTICE
- include LICENSE
- include CHANGELOG.rst
- global-exclude __pycache__ *.pyc
apache-airflow-providers-dbt-cloud-3.4.0rc1/NOTICE

@@ -1,6 +0,0 @@
- Apache Airflow
- Copyright 2016-2021 The Apache Software Foundation
-
- This product includes software developed at The Apache Software
- Foundation (http://www.apache.org/).
- =======================================================================
apache-airflow-providers-dbt-cloud-3.4.0rc1/PKG-INFO

@@ -1,133 +0,0 @@
- Metadata-Version: 2.1
- Name: apache-airflow-providers-dbt-cloud
- Version: 3.4.0rc1
- Summary: Provider for Apache Airflow. Implements apache-airflow-providers-dbt-cloud package
- Home-page: https://airflow.apache.org/
- Download-URL: https://archive.apache.org/dist/airflow/providers
- Author: Apache Software Foundation
- Author-email: dev@airflow.apache.org
- License: Apache License 2.0
- Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.4.0/
- Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.4.0/changelog.html
- Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
- Project-URL: Source Code, https://github.com/apache/airflow
- Project-URL: Slack Chat, https://s.apache.org/airflow-slack
- Project-URL: Twitter, https://twitter.com/ApacheAirflow
- Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
- Classifier: Development Status :: 5 - Production/Stable
- Classifier: Environment :: Console
- Classifier: Environment :: Web Environment
- Classifier: Intended Audience :: Developers
- Classifier: Intended Audience :: System Administrators
- Classifier: Framework :: Apache Airflow
- Classifier: Framework :: Apache Airflow :: Provider
- Classifier: License :: OSI Approved :: Apache Software License
- Classifier: Programming Language :: Python :: 3.8
- Classifier: Programming Language :: Python :: 3.9
- Classifier: Programming Language :: Python :: 3.10
- Classifier: Programming Language :: Python :: 3.11
- Classifier: Topic :: System :: Monitoring
- Requires-Python: ~=3.8
- Description-Content-Type: text/x-rst
- Provides-Extra: http
- Provides-Extra: openlineage
- License-File: LICENSE
- License-File: NOTICE
-
-
- .. Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements. See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership. The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License. You may obtain a copy of the License at
-
- .. http://www.apache.org/licenses/LICENSE-2.0
-
- .. Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied. See the License for the
- specific language governing permissions and limitations
- under the License.
-
- .. Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements. See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership. The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License. You may obtain a copy of the License at
-
- .. http://www.apache.org/licenses/LICENSE-2.0
-
- .. Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied. See the License for the
- specific language governing permissions and limitations
- under the License.
-
-
- Package ``apache-airflow-providers-dbt-cloud``
-
- Release: ``3.4.0rc1``
-
-
- `dbt Cloud <https://www.getdbt.com/product/what-is-dbt/>`__
-
-
- Provider package
- ----------------
-
- This is a provider package for ``dbt.cloud`` provider. All classes for this provider package
- are in ``airflow.providers.dbt.cloud`` python package.
-
- You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.4.0/>`_.
-
-
- Installation
- ------------
-
- You can install this package on top of an existing Airflow 2 installation (see ``Requirements`` below
- for the minimum Airflow version supported) via
- ``pip install apache-airflow-providers-dbt-cloud``
-
- The package supports the following python versions: 3.8,3.9,3.10,3.11
-
- Requirements
- ------------
-
- ================================= ==================
- PIP package                       Version required
- ================================= ==================
- ``apache-airflow``                ``>=2.5.0``
- ``apache-airflow-providers-http``
- ``asgiref``
- ``aiohttp``
- ================================= ==================
-
- Cross provider package dependencies
- -----------------------------------
-
- Those are dependencies that might be needed in order to use all the features of the package.
- You need to install the specified provider packages in order to use them.
-
- You can install such cross-provider dependencies when installing from PyPI. For example:
-
- .. code-block:: bash
-
-     pip install apache-airflow-providers-dbt-cloud[http]
-
-
- ============================================================================================================== ===============
- Dependent package                                                                                              Extra
- ============================================================================================================== ===============
- `apache-airflow-providers-http <https://airflow.apache.org/docs/apache-airflow-providers-http>`_               ``http``
- `apache-airflow-providers-openlineage <https://airflow.apache.org/docs/apache-airflow-providers-openlineage>`_ ``openlineage``
- ============================================================================================================== ===============
-
- The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.4.0/changelog.html>`_.
apache-airflow-providers-dbt-cloud-3.4.0rc1/apache_airflow_providers_dbt_cloud.egg-info/SOURCES.txt

@@ -1,26 +0,0 @@
- LICENSE
- MANIFEST.in
- NOTICE
- README.rst
- pyproject.toml
- setup.cfg
- setup.py
- airflow/providers/dbt/cloud/__init__.py
- airflow/providers/dbt/cloud/get_provider_info.py
- airflow/providers/dbt/cloud/hooks/__init__.py
- airflow/providers/dbt/cloud/hooks/dbt.py
- airflow/providers/dbt/cloud/operators/__init__.py
- airflow/providers/dbt/cloud/operators/dbt.py
- airflow/providers/dbt/cloud/sensors/__init__.py
- airflow/providers/dbt/cloud/sensors/dbt.py
- airflow/providers/dbt/cloud/triggers/__init__.py
- airflow/providers/dbt/cloud/triggers/dbt.py
- airflow/providers/dbt/cloud/utils/__init__.py
- airflow/providers/dbt/cloud/utils/openlineage.py
- apache_airflow_providers_dbt_cloud.egg-info/PKG-INFO
- apache_airflow_providers_dbt_cloud.egg-info/SOURCES.txt
- apache_airflow_providers_dbt_cloud.egg-info/dependency_links.txt
- apache_airflow_providers_dbt_cloud.egg-info/entry_points.txt
- apache_airflow_providers_dbt_cloud.egg-info/not-zip-safe
- apache_airflow_providers_dbt_cloud.egg-info/requires.txt
- apache_airflow_providers_dbt_cloud.egg-info/top_level.txt
apache-airflow-providers-dbt-cloud-3.4.0rc1/apache_airflow_providers_dbt_cloud.egg-info/entry_points.txt

@@ -1,2 +0,0 @@
- [apache_airflow_provider]
- provider_info = airflow.providers.dbt.cloud.get_provider_info:get_provider_info
apache-airflow-providers-dbt-cloud-3.4.0rc1/apache_airflow_providers_dbt_cloud.egg-info/requires.txt

@@ -1,10 +0,0 @@
- aiohttp
- apache-airflow-providers-http
- apache-airflow>=2.5.0.dev0
- asgiref
-
- [http]
- apache-airflow-providers-http
-
- [openlineage]
- apache-airflow-providers-openlineage
apache-airflow-providers-dbt-cloud-3.4.0rc1/pyproject.toml

@@ -1,179 +0,0 @@
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements. See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership. The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License. You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing,
- # software distributed under the License is distributed on an
- # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- # KIND, either express or implied. See the License for the
- # specific language governing permissions and limitations
- # under the License.
- [tool.black]
- line-length = 110
- target-version = ['py38', 'py39', 'py310', 'py311']
-
- # Editable installs are currently broken using setuptools 64.0.0 and above. The problem is tracked in
- # https://github.com/pypa/setuptools/issues/3548. We're also discussing how we could potentially fix
- # this problem on our end in issue https://github.com/apache/airflow/issues/30764. Until then we need
- # to use one of the following workarounds locally for editable installs:
- # 1) Pin setuptools <= 63.4.3 below in the [build-system] section.
- # 2) Include your airflow source code directory in PYTHONPATH.
- [build-system]
- requires = ['setuptools==67.2.0']
- build-backend = "setuptools.build_meta"
-
- [tool.ruff]
- target-version = "py38"
- typing-modules = ["airflow.typing_compat"]
- line-length = 110
- extend-exclude = [
-     ".eggs",
-     "airflow/_vendor/*",
-     "airflow/providers/google/ads/_vendor/*",
-     # The files generated by stubgen aren't 100% valid syntax it turns out, and we don't ship them, so we can
-     # ignore them in ruff
-     "airflow/providers/common/sql/*/*.pyi",
-     "airflow/migrations/versions/*.py",
-     "tests/dags/test_imports.py",
- ]
-
- extend-select = [
-     "I", # Missing required import (auto-fixable)
-     "UP", # Pyupgrade
-     "RUF100", # Unused noqa (auto-fixable)
-
-     # implicit single-line string concatenation
-     "ISC001",
-     # We ignore more pydocstyle than we enable, so be more selective at what we enable
-     "D101",
-     "D106",
-     "D2",
-     "D3",
-     "D400",
-     # "D401", # Not enabled by ruff, but we don't want it
-     "D402",
-     "D403",
-     "D412",
-     "D419",
-     "TCH001", # typing-only-first-party-import
-     "TCH002", # typing-only-third-party-import
-     "TID251",
- ]
- extend-ignore = [
-     "D203",
-     "D212",
-     "D213",
-     "D214",
-     "D215",
-     "E731",
- ]
-
- namespace-packages = ["airflow/providers"]
-
- [tool.pytest.ini_options]
- # * Disable `flaky` plugin for pytest. This plugin conflicts with `rerunfailures` because provide same marker.
- # * Disable `nose` builtin plugin for pytest. This feature deprecated in 7.2 and will be removed in pytest>=8
- # * And we focus on use native pytest capabilities rather than adopt another frameworks.
- addopts = "-rasl --verbosity=2 -p no:flaky -p no:nose --asyncio-mode=strict"
- norecursedirs = [
-     ".eggs",
-     "airflow",
-     "tests/dags_with_system_exit",
-     "tests/test_utils",
-     "tests/dags_corrupted",
-     "tests/dags",
-     "tests/system/providers/google/cloud/dataproc/resources",
-     "tests/system/providers/google/cloud/gcs/resources",
- ]
- log_level = "INFO"
- filterwarnings = [
-     "error::pytest.PytestCollectionWarning",
-     "ignore::DeprecationWarning:flask_appbuilder.filemanager",
-     "ignore::DeprecationWarning:flask_appbuilder.widgets",
-     # https://github.com/dpgaspar/Flask-AppBuilder/pull/1940
-     "ignore::DeprecationWarning:flask_sqlalchemy",
-     # https://github.com/dpgaspar/Flask-AppBuilder/pull/1903
-     "ignore::DeprecationWarning:apispec.utils",
- ]
- python_files = [
-     "test_*.py",
-     "example_*.py",
- ]
- testpaths = [
-     "tests",
- ]
-
- [tool.ruff.isort]
- required-imports = ["from __future__ import annotations"]
- combine-as-imports = true
-
- [tool.ruff.per-file-ignores]
- "airflow/models/__init__.py" = ["F401"]
- "airflow/models/sqla_models.py" = ["F401"]
-
- # The test_python.py is needed because adding __future__.annotations breaks runtime checks that are
- # needed for the test to work
- "tests/decorators/test_python.py" = ["I002"]
-
- # The Pydantic representations of SqlAlchemy Models are not parsed well with Pydantic
- # when __future__.annotations is used so we need to skip them from upgrading
- # Pydantic also require models to be imported during execution
- "airflow/serialization/pydantic/*.py" = ["I002", "UP007", "TCH001"]
-
- # Ignore pydoc style from these
- "*.pyi" = ["D"]
- "tests/*" = ["D"]
- "scripts/*" = ["D"]
- "dev/*" = ["D"]
- "docs/*" = ["D"]
- "provider_packages/*" = ["D"]
- "docker_tests/*" = ["D"]
- "kubernetes_tests/*" = ["D"]
- "*/example_dags/*" = ["D"]
- "chart/*" = ["D"]
-
- # All of the modules which have an extra license header (i.e. that we copy from another project) need to
- # ignore E402 -- module level import not at top level
- "airflow/api/auth/backend/kerberos_auth.py" = ["E402"]
- "airflow/security/kerberos.py" = ["E402"]
- "airflow/security/utils.py" = ["E402"]
- "tests/providers/elasticsearch/log/elasticmock/__init__.py" = ["E402"]
- "tests/providers/elasticsearch/log/elasticmock/utilities/__init__.py" = ["E402"]
-
- [tool.ruff.flake8-tidy-imports.banned-api]
- "airflow.AirflowException".msg = "Use airflow.exceptions.AirflowException instead."
- "airflow.Dataset".msg = "Use airflow.datasets.Dataset instead."
-
- [tool.coverage.run]
- branch = true
- relative_files = true
- source = ["airflow"]
- omit = [
-     "airflow/_vendor/**",
-     "airflow/contrib/**",
-     "airflow/example_dags/**",
-     "airflow/migrations/**",
-     "airflow/providers/**/example_dags/**",
-     "airflow/www/node_modules/**",
-     "airflow/providers/google/ads/_vendor/**",
- ]
-
- [tool.coverage.report]
- skip_empty = true
- exclude_also = [
-     "def __repr__",
-     "raise AssertionError",
-     "raise NotImplementedError",
-     "if __name__ == .__main__.:",
-     "@(abc\\.)?abstractmethod",
-     "@(typing(_extensions)?\\.)?overload",
-     "if TYPE_CHECKING:"
- ]
- [tool.ruff.flake8-type-checking]
- exempt-modules = ["typing", "typing_extensions"]
apache-airflow-providers-dbt-cloud-3.4.0rc1/setup.cfg

@@ -1,65 +0,0 @@
- [metadata]
- name = apache-airflow-providers-dbt-cloud
- summary = Provider for Apache Airflow. Implements apache-airflow-providers-dbt-cloud package
- description = Provider package apache-airflow-providers-dbt-cloud for Apache Airflow
- long_description = file: README.rst
- long_description_content_type = text/x-rst
- author = Apache Software Foundation
- author_email = dev@airflow.apache.org
- url = https://airflow.apache.org/
- download_url = https://archive.apache.org/dist/airflow/providers
- license = Apache License 2.0
- license_files =
-     LICENSE
-     NOTICE
- classifiers =
-     Development Status :: 5 - Production/Stable
-     Environment :: Console
-     Environment :: Web Environment
-     Intended Audience :: Developers
-     Intended Audience :: System Administrators
-     Framework :: Apache Airflow
-     Framework :: Apache Airflow :: Provider
-     License :: OSI Approved :: Apache Software License
-     Programming Language :: Python :: 3.8
-     Programming Language :: Python :: 3.9
-     Programming Language :: Python :: 3.10
-     Programming Language :: Python :: 3.11
-     Topic :: System :: Monitoring
- project_urls =
-     Documentation=https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.4.0/
-     Changelog=https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.4.0/changelog.html
-     Bug Tracker=https://github.com/apache/airflow/issues
-     Source Code=https://github.com/apache/airflow
-     Slack Chat=https://s.apache.org/airflow-slack
-     Twitter=https://twitter.com/ApacheAirflow
-     YouTube=https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
-
- [bdist_wheel]
- python_tag = py3
-
- [options]
- zip_safe = False
- include_package_data = True
- python_requires = ~=3.8
- packages = find:
- setup_requires =
-     setuptools
-     wheel
- install_requires =
-     aiohttp
-     apache-airflow-providers-http
-     apache-airflow>=2.5.0.dev0
-     asgiref
-
- [options.entry_points]
- apache_airflow_provider =
-     provider_info=airflow.providers.dbt.cloud.get_provider_info:get_provider_info
-
- [files]
- packages = airflow.providers.dbt.cloud
-
- [egg_info]
- tag_build = rc1
- tag_date = 0
-
apache-airflow-providers-dbt-cloud-3.4.0rc1/setup.py

@@ -1,52 +0,0 @@
- #
- # Licensed to the Apache Software Foundation (ASF) under one
- # or more contributor license agreements. See the NOTICE file
- # distributed with this work for additional information
- # regarding copyright ownership. The ASF licenses this file
- # to you under the Apache License, Version 2.0 (the
- # "License"); you may not use this file except in compliance
- # with the License. You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing,
- # software distributed under the License is distributed on an
- # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- # KIND, either express or implied. See the License for the
- # specific language governing permissions and limitations
- # under the License.
-
- # NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
- # OVERWRITTEN WHEN PREPARING PACKAGES.
- #
- # IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
- # `SETUP_TEMPLATE.py.jinja2` IN the `dev/provider_packages` DIRECTORY
-
- """Setup.py for the apache-airflow-providers-dbt-cloud package."""
-
- from setuptools import find_namespace_packages, setup
-
- version = "3.4.0"
-
-
- def do_setup():
-     """Perform the package apache-airflow-providers-dbt-cloud setup."""
-     setup(
-         version=version,
-         extras_require={
-             "http": ["apache-airflow-providers-http"],
-             "openlineage": ["apache-airflow-providers-openlineage"],
-         },
-         packages=find_namespace_packages(
-             include=[
-                 "airflow.providers.dbt.cloud",
-                 "airflow.providers.dbt.cloud.*",
-                 "airflow.providers.dbt.cloud_vendor",
-                 "airflow.providers.dbt.cloud_vendor.*",
-             ],
-         ),
-     )
-
-
- if __name__ == "__main__":
-     do_setup()