apache-airflow-providers-dbt-cloud 3.2.0rc1__py3-none-any.whl → 3.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Note: the registry flags this version of apache-airflow-providers-dbt-cloud as a potentially problematic release.
- airflow/providers/dbt/cloud/__init__.py +8 -5
- airflow/providers/dbt/cloud/get_provider_info.py +2 -4
- airflow/providers/dbt/cloud/hooks/dbt.py +3 -4
- airflow/providers/dbt/cloud/sensors/dbt.py +14 -13
- airflow/providers/dbt/cloud/triggers/dbt.py +5 -9
- {apache_airflow_providers_dbt_cloud-3.2.0rc1.dist-info → apache_airflow_providers_dbt_cloud-3.2.1.dist-info}/METADATA +30 -8
- apache_airflow_providers_dbt_cloud-3.2.1.dist-info/RECORD +17 -0
- apache_airflow_providers_dbt_cloud-3.2.0rc1.dist-info/RECORD +0 -17
- {apache_airflow_providers_dbt_cloud-3.2.0rc1.dist-info → apache_airflow_providers_dbt_cloud-3.2.1.dist-info}/LICENSE +0 -0
- {apache_airflow_providers_dbt_cloud-3.2.0rc1.dist-info → apache_airflow_providers_dbt_cloud-3.2.1.dist-info}/NOTICE +0 -0
- {apache_airflow_providers_dbt_cloud-3.2.0rc1.dist-info → apache_airflow_providers_dbt_cloud-3.2.1.dist-info}/WHEEL +0 -0
- {apache_airflow_providers_dbt_cloud-3.2.0rc1.dist-info → apache_airflow_providers_dbt_cloud-3.2.1.dist-info}/entry_points.txt +0 -0
- {apache_airflow_providers_dbt_cloud-3.2.0rc1.dist-info → apache_airflow_providers_dbt_cloud-3.2.1.dist-info}/top_level.txt +0 -0
airflow/providers/dbt/cloud/__init__.py

@@ -26,13 +26,16 @@ from __future__ import annotations
 
 import packaging.version
 
-
+__all__ = ["__version__"]
 
-
+__version__ = "3.2.1"
 
-
+try:
+    from airflow import __version__ as airflow_version
+except ImportError:
+    from airflow.version import version as airflow_version
 
-if packaging.version.parse(
+if packaging.version.parse(airflow_version) < packaging.version.parse("2.4.0"):
     raise RuntimeError(
-        f"The package `apache-airflow-providers-dbt-cloud:{
+        f"The package `apache-airflow-providers-dbt-cloud:{__version__}` requires Apache Airflow 2.4.0+"  # NOQA: E501
     )
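
The rewritten guard above first resolves the installed Airflow version (falling back to ``airflow.version``, per the circular-import fix noted in the changelog) and then compares it against the 2.4.0 floor. A minimal sketch of the comparison the guard relies on; the 2.4.0 floor comes from the diff, the sample version strings are illustrative only:

    # Illustrative check only; not part of the package.
    import packaging.version

    MIN_AIRFLOW_VERSION = packaging.version.parse("2.4.0")

    for candidate in ("2.3.4", "2.4.0", "2.6.1"):
        supported = packaging.version.parse(candidate) >= MIN_AIRFLOW_VERSION
        print(candidate, "ok" if supported else "too old")  # only "2.3.4" prints "too old"
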
airflow/providers/dbt/cloud/get_provider_info.py

@@ -29,6 +29,7 @@ def get_provider_info():
         "description": "`dbt Cloud <https://www.getdbt.com/product/what-is-dbt/>`__\n",
         "suspended": False,
         "versions": [
+            "3.2.1",
             "3.2.0",
             "3.1.1",
             "3.1.0",

@@ -62,10 +63,7 @@ def get_provider_info():
             {"integration-name": "dbt Cloud", "python-modules": ["airflow.providers.dbt.cloud.hooks.dbt"]}
         ],
         "triggers": [
-            {
-                "integration-name": "dbt Cloud",
-                "class-names": ["airflow.providers.dbt.cloud.triggers.dbt.DbtCloudRunJobTrigger"],
-            }
+            {"integration-name": "dbt Cloud", "python-modules": ["airflow.providers.dbt.cloud.triggers.dbt"]}
         ],
         "connection-types": [
             {
airflow/providers/dbt/cloud/hooks/dbt.py

@@ -19,7 +19,7 @@ from __future__ import annotations
 import json
 import time
 from enum import Enum
-from functools import wraps
+from functools import cached_property, wraps
 from inspect import signature
 from typing import TYPE_CHECKING, Any, Callable, Sequence, Set, TypeVar, cast
 

@@ -29,7 +29,6 @@ from asgiref.sync import sync_to_async
 from requests.auth import AuthBase
 from requests.sessions import Session
 
-from airflow.compat.functools import cached_property
 from airflow.exceptions import AirflowException
 from airflow.providers.http.hooks.http import HttpHook
 from airflow.typing_compat import TypedDict

@@ -192,7 +191,7 @@ class DbtCloudHook(HttpHook):
         tenant: str, endpoint: str, include_related: list[str] | None = None
     ) -> tuple[str, dict[str, Any]]:
         """
-        Form URL from base url and endpoint url
+        Form URL from base url and endpoint url.
 
         :param tenant: The tenant domain name which is need to be replaced in base url.
         :param endpoint: Endpoint url to be requested.

@@ -206,7 +205,7 @@ class DbtCloudHook(HttpHook):
         return url, data
 
     async def get_headers_tenants_from_connection(self) -> tuple[dict[str, Any], str]:
-        """Get Headers, tenants from the connection details"""
+        """Get Headers, tenants from the connection details."""
         headers: dict[str, Any] = {}
         connection: Connection = await sync_to_async(self.get_connection)(self.dbt_cloud_conn_id)
         tenant = self._get_tenant_domain(connection)
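
The only import-level change in the hook is where ``cached_property`` comes from: the standard library ``functools`` (available on Python 3.8+, the new floor) replaces the ``airflow.compat.functools`` shim. A minimal sketch of the pattern, using a hypothetical class rather than the provider's code:

    from functools import cached_property

    class TenantInfo:
        """Hypothetical example: computes a value once, then caches it on the instance."""

        @cached_property
        def domain(self) -> str:
            print("resolving tenant domain")  # runs only on first access
            return "cloud.getdbt.com"         # illustrative value

    info = TenantInfo()
    info.domain  # prints "resolving tenant domain"
    info.domain  # no print; served from the per-instance cache
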
airflow/providers/dbt/cloud/sensors/dbt.py

@@ -30,8 +30,7 @@ if TYPE_CHECKING:
 
 
 class DbtCloudJobRunSensor(BaseSensorOperator):
-    """
-    Checks the status of a dbt Cloud job run.
+    """Checks the status of a dbt Cloud job run.
 
     .. seealso::
         For more information on how to use this sensor, take a look at the guide:

@@ -91,9 +90,11 @@ class DbtCloudJobRunSensor(BaseSensorOperator):
         return job_run_status == DbtCloudJobRunStatus.SUCCESS.value
 
     def execute(self, context: Context) -> None:
-        """
-
-
+        """Run the sensor.
+
+        Depending on whether ``deferrable`` is set, this would either defer to
+        the triggerer or poll for states of the job run, until the job reaches a
+        failure state or success state.
         """
         if not self.deferrable:
             super().execute(context)

@@ -113,10 +114,10 @@ class DbtCloudJobRunSensor(BaseSensorOperator):
         )
 
     def execute_complete(self, context: Context, event: dict[str, Any]) -> int:
-        """
-
-
-        successful.
+        """Callback for when the trigger fires - returns immediately.
+
+        This relies on trigger to throw an exception, otherwise it assumes
+        execution was successful.
         """
         if event["status"] in ["error", "cancelled"]:
             raise AirflowException("Error in dbt: " + event["message"])

@@ -125,10 +126,10 @@ class DbtCloudJobRunSensor(BaseSensorOperator):
 
 
 class DbtCloudJobRunAsyncSensor(DbtCloudJobRunSensor):
-    """
-
-    Please use
-
+    """This class is deprecated.
+
+    Please use :class:`airflow.providers.dbt.cloud.sensor.dbt.DbtCloudJobRunSensor`
+    with ``deferrable=True``.
     """
 
     def __init__(self, **kwargs: Any) -> None:
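
As the deprecation notice above states, ``DbtCloudJobRunAsyncSensor`` is superseded by ``DbtCloudJobRunSensor`` with ``deferrable=True``. A hedged usage sketch follows; the DAG, task id, connection id, and run id are illustrative, and the keyword names should be verified against the installed provider's documentation:

    from datetime import datetime

    from airflow import DAG
    from airflow.providers.dbt.cloud.sensors.dbt import DbtCloudJobRunSensor

    with DAG(dag_id="dbt_cloud_watch", start_date=datetime(2023, 1, 1), schedule=None):
        wait_for_job = DbtCloudJobRunSensor(
            task_id="wait_for_dbt_job_run",
            dbt_cloud_conn_id="dbt_cloud_default",  # assumed connection id
            run_id=12345,                           # usually pulled via XCom from the task that started the run
            deferrable=True,                        # hands polling to the triggerer instead of holding a worker slot
        )
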
airflow/providers/dbt/cloud/triggers/dbt.py

@@ -25,9 +25,9 @@ from airflow.triggers.base import BaseTrigger, TriggerEvent
 
 
 class DbtCloudRunJobTrigger(BaseTrigger):
-    """
-
-
+    """Trigger to make an HTTP call to dbt and get the status for the job.
+
+    This is done with run id in polling interval of time.
 
     :param conn_id: The connection identifier for connecting to Dbt.
     :param run_id: The ID of a dbt Cloud job.

@@ -65,7 +65,7 @@ class DbtCloudRunJobTrigger(BaseTrigger):
         )
 
     async def run(self) -> AsyncIterator[TriggerEvent]:
-        """Make async connection to Dbt, polls for the pipeline run status"""
+        """Make async connection to Dbt, polls for the pipeline run status."""
         hook = DbtCloudHook(self.conn_id)
         try:
             while await self.is_still_running(hook):

@@ -108,11 +108,7 @@
             yield TriggerEvent({"status": "error", "message": str(e), "run_id": self.run_id})
 
     async def is_still_running(self, hook: DbtCloudHook) -> bool:
-        """
-        Async function to check whether the job is submitted via async API is in
-        running state and returns True if it is still running else
-        return False
-        """
+        """Check whether the submitted job is running."""
        job_run_status = await hook.get_job_status(self.run_id, self.account_id)
        if not DbtCloudJobRunStatus.is_terminal(job_run_status):
            return True
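
The hunks above show the trigger's overall shape: ``run()`` loops while ``is_still_running()`` reports a non-terminal status and finally yields a ``TriggerEvent`` whose ``status``, ``message``, and ``run_id`` keys are what the sensor's ``execute_complete`` inspects. A standalone sketch of that polling pattern; this is not the provider's code, and only the payload keys are taken from the diff:

    import asyncio
    from typing import Awaitable, Callable

    TERMINAL_STATES = ("success", "error", "cancelled")  # illustrative state names

    async def poll_until_terminal(
        get_status: Callable[[int], Awaitable[str]], run_id: int, poll_interval: float = 5.0
    ) -> dict:
        # Poll the status callback until a terminal state, then return an event-style payload.
        while True:
            status = await get_status(run_id)
            if status in TERMINAL_STATES:
                return {"status": status, "message": f"Job run {run_id} reached {status}", "run_id": run_id}
            await asyncio.sleep(poll_interval)
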
{apache_airflow_providers_dbt_cloud-3.2.0rc1.dist-info → apache_airflow_providers_dbt_cloud-3.2.1.dist-info}/METADATA

@@ -1,13 +1,13 @@
 Metadata-Version: 2.1
 Name: apache-airflow-providers-dbt-cloud
-Version: 3.2.
+Version: 3.2.1
 Summary: Provider for Apache Airflow. Implements apache-airflow-providers-dbt-cloud package
 Home-page: https://airflow.apache.org/
 Download-URL: https://archive.apache.org/dist/airflow/providers
 Author: Apache Software Foundation
 Author-email: dev@airflow.apache.org
 License: Apache License 2.0
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.2.
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.2.1/
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack

@@ -21,18 +21,18 @@ Classifier: Intended Audience :: System Administrators
 Classifier: Framework :: Apache Airflow
 Classifier: Framework :: Apache Airflow :: Provider
 Classifier: License :: OSI Approved :: Apache Software License
-Classifier: Programming Language :: Python :: 3.7
 Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
 Classifier: Topic :: System :: Monitoring
-Requires-Python: ~=3.
+Requires-Python: ~=3.8
 Description-Content-Type: text/x-rst
 License-File: LICENSE
 License-File: NOTICE
 Requires-Dist: aiohttp
 Requires-Dist: apache-airflow-providers-http
-Requires-Dist: apache-airflow (>=2.4.0
+Requires-Dist: apache-airflow (>=2.4.0)
 Requires-Dist: asgiref
 Provides-Extra: http
 Requires-Dist: apache-airflow-providers-http ; extra == 'http'

@@ -58,7 +58,7 @@ Requires-Dist: apache-airflow-providers-http ; extra == 'http'
 
 Package ``apache-airflow-providers-dbt-cloud``
 
-Release: ``3.2.
+Release: ``3.2.1``
 
 
 `dbt Cloud <https://www.getdbt.com/product/what-is-dbt/>`__

@@ -71,7 +71,7 @@ This is a provider package for ``dbt.cloud`` provider. All classes for this prov
 are in ``airflow.providers.dbt.cloud`` python package.
 
 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.2.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.2.1/>`_.
 
 
 Installation

@@ -81,7 +81,7 @@ You can install this package on top of an existing Airflow 2 installation (see `
 for the minimum Airflow version supported) via
 ``pip install apache-airflow-providers-dbt-cloud``
 
-The package supports the following python versions: 3.
+The package supports the following python versions: 3.8,3.9,3.10,3.11
 
 Requirements
 ------------

@@ -140,6 +140,24 @@ Dependent package
 Changelog
 ---------
 
+3.2.1
+.....
+
+.. note::
+   This release dropped support for Python 3.7
+
+Misc
+~~~~
+
+* ``Remove Python 3.7 support (#30963)``
+
+.. Below changes are excluded from the changelog. Move them to
+   appropriate section above if needed. Do not delete the lines(!):
+   * ``Improve docstrings in providers (#31681)``
+   * ``Add discoverability for triggers in provider.yaml (#31576)``
+   * ``Add D400 pydocstyle check - Providers (#31427)``
+   * ``Add note about dropping Python 3.7 for providers (#32015)``
+
 3.2.0
 .....
 

@@ -152,6 +170,7 @@ Misc
 
 * ``Bump minimum Airflow version in providers (#30917)``
 * ``Optimize deferred execution mode in DbtCloudJobRunSensor (#30968)``
+* ``Optimize deferred execution mode for DbtCloudRunJobOperator (#31188)``
 
 .. Below changes are excluded from the changelog. Move them to
    appropriate section above if needed. Do not delete the lines(!):

@@ -159,6 +178,9 @@ Misc
 * ``Add full automation for min Airflow version for providers (#30994)``
 * ``Add cli cmd to list the provider trigger info (#30822)``
 * ``Upgrade ruff to 0.0.262 (#30809)``
+* ``Use '__version__' in providers not 'version' (#31393)``
+* ``Fixing circular import error in providers caused by airflow version check (#31379)``
+* ``Prepare docs for May 2023 wave of Providers (#31252)``
 
 3.1.1
 .....
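
A quick, hedged way to confirm an environment satisfies the constraints listed in the metadata above (Python 3.8+, Airflow 2.4.0+, provider 3.2.1); the package names are as published, but the check itself is not part of the release:

    import sys
    from importlib.metadata import version

    assert sys.version_info >= (3, 8), "apache-airflow-providers-dbt-cloud 3.2.1 requires Python 3.8+"
    print("provider:", version("apache-airflow-providers-dbt-cloud"))  # expect 3.2.1
    print("airflow: ", version("apache-airflow"))                      # must be >= 2.4.0
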
apache_airflow_providers_dbt_cloud-3.2.1.dist-info/RECORD

@@ -0,0 +1,17 @@
+airflow/providers/dbt/cloud/__init__.py,sha256=oWVceKUn7wyJJH_zfR2PD2MRe_m14txZuGWTLohiaDg,1534
+airflow/providers/dbt/cloud/get_provider_info.py,sha256=PUfarvk8mhG_WW6ISZBj4nfXIY71xzgnuLZwCRotlz0,2945
+airflow/providers/dbt/cloud/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/dbt/cloud/hooks/dbt.py,sha256=lyjedRhr5-dOKALStr0GXDqrMx9K6Ybs8ykQBZ6OFQs,24235
+airflow/providers/dbt/cloud/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/dbt/cloud/operators/dbt.py,sha256=ATIkbX7e5qi7QL-ZNXYWxUd1Spj6Rh03pugxyxbmXsE,13638
+airflow/providers/dbt/cloud/sensors/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
+airflow/providers/dbt/cloud/sensors/dbt.py,sha256=ZOnrFtD-8AgDWghu4LP1RJV1nnThP0B6S4OlKv1Sfz8,5614
+airflow/providers/dbt/cloud/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
+airflow/providers/dbt/cloud/triggers/dbt.py,sha256=TGBevYo4pcZT_x7bist5H5d9gl_YbNypXG71n_6DcgM,4649
+apache_airflow_providers_dbt_cloud-3.2.1.dist-info/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
+apache_airflow_providers_dbt_cloud-3.2.1.dist-info/METADATA,sha256=rBmsWJ_2FjHzYsOy2E_k0lyz2OvIJWiNpbFPkvI06DA,11916
+apache_airflow_providers_dbt_cloud-3.2.1.dist-info/NOTICE,sha256=m-6s2XynUxVSUIxO4rVablAZCvFq-wmLrqV91DotRBw,240
+apache_airflow_providers_dbt_cloud-3.2.1.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
+apache_airflow_providers_dbt_cloud-3.2.1.dist-info/entry_points.txt,sha256=k8OVpnO31CZUDg6GUidxo_K0s-awqUtzwgleihksBkY,106
+apache_airflow_providers_dbt_cloud-3.2.1.dist-info/top_level.txt,sha256=OeMVH5md7fr2QQWpnZoOWWxWO-0WH1IP70lpTVwopPg,8
+apache_airflow_providers_dbt_cloud-3.2.1.dist-info/RECORD,,
apache_airflow_providers_dbt_cloud-3.2.0rc1.dist-info/RECORD

@@ -1,17 +0,0 @@
-airflow/providers/dbt/cloud/__init__.py,sha256=ZjOhg5buhb173moaAmIpjW6oKpd_filPRtJqlp8LmGA,1392
-airflow/providers/dbt/cloud/get_provider_info.py,sha256=fxh0XSPCMF919qUC6rBI2v7SQEcicblsyVHNezcmEn0,2990
-airflow/providers/dbt/cloud/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/dbt/cloud/hooks/dbt.py,sha256=8SlYp6dkeHSPx1yC22LjJjJ7pAFY3nU_9InBbAVeQ2c,24269
-airflow/providers/dbt/cloud/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/dbt/cloud/operators/dbt.py,sha256=ATIkbX7e5qi7QL-ZNXYWxUd1Spj6Rh03pugxyxbmXsE,13638
-airflow/providers/dbt/cloud/sensors/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
-airflow/providers/dbt/cloud/sensors/dbt.py,sha256=8IXuf_OAyA178tR81FDhcCJBtJfXDsq4v21DaF6EaCc,5508
-airflow/providers/dbt/cloud/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
-airflow/providers/dbt/cloud/triggers/dbt.py,sha256=rYkOps1tJgiN6tc3Dn9V42L3O70KQpOpjPYgOKc3UFM,4839
-apache_airflow_providers_dbt_cloud-3.2.0rc1.dist-info/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
-apache_airflow_providers_dbt_cloud-3.2.0rc1.dist-info/METADATA,sha256=4alahubtjtGssOFexRBb88zHP_VGwvQb2s7PUmuhvzQ,11137
-apache_airflow_providers_dbt_cloud-3.2.0rc1.dist-info/NOTICE,sha256=m-6s2XynUxVSUIxO4rVablAZCvFq-wmLrqV91DotRBw,240
-apache_airflow_providers_dbt_cloud-3.2.0rc1.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
-apache_airflow_providers_dbt_cloud-3.2.0rc1.dist-info/entry_points.txt,sha256=k8OVpnO31CZUDg6GUidxo_K0s-awqUtzwgleihksBkY,106
-apache_airflow_providers_dbt_cloud-3.2.0rc1.dist-info/top_level.txt,sha256=OeMVH5md7fr2QQWpnZoOWWxWO-0WH1IP70lpTVwopPg,8
-apache_airflow_providers_dbt_cloud-3.2.0rc1.dist-info/RECORD,,