apache-airflow-providers-dbt-cloud 4.0.0rc2__py3-none-any.whl → 4.2.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


@@ -199,55 +199,3 @@ distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-
- ============================================================================
- APACHE AIRFLOW SUBCOMPONENTS:
-
- The Apache Airflow project contains subcomponents with separate copyright
- notices and license terms. Your use of the source code for the these
- subcomponents is subject to the terms and conditions of the following
- licenses.
-
-
- ========================================================================
- Third party Apache 2.0 licenses
- ========================================================================
-
- The following components are provided under the Apache 2.0 License.
- See project link for details. The text of each license is also included
- at 3rd-party-licenses/LICENSE-[project].txt.
-
- (ALv2 License) hue v4.3.0 (https://github.com/cloudera/hue/)
- (ALv2 License) jqclock v2.3.0 (https://github.com/JohnRDOrazio/jQuery-Clock-Plugin)
- (ALv2 License) bootstrap3-typeahead v4.0.2 (https://github.com/bassjobsen/Bootstrap-3-Typeahead)
- (ALv2 License) connexion v2.7.0 (https://github.com/zalando/connexion)
-
- ========================================================================
- MIT licenses
- ========================================================================
-
- The following components are provided under the MIT License. See project link for details.
- The text of each license is also included at 3rd-party-licenses/LICENSE-[project].txt.
-
- (MIT License) jquery v3.5.1 (https://jquery.org/license/)
- (MIT License) dagre-d3 v0.6.4 (https://github.com/cpettitt/dagre-d3)
- (MIT License) bootstrap v3.4.1 (https://github.com/twbs/bootstrap/)
- (MIT License) d3-tip v0.9.1 (https://github.com/Caged/d3-tip)
- (MIT License) dataTables v1.10.25 (https://datatables.net)
- (MIT License) normalize.css v3.0.2 (http://necolas.github.io/normalize.css/)
- (MIT License) ElasticMock v1.3.2 (https://github.com/vrcmarcos/elasticmock)
- (MIT License) MomentJS v2.24.0 (http://momentjs.com/)
- (MIT License) eonasdan-bootstrap-datetimepicker v4.17.49 (https://github.com/eonasdan/bootstrap-datetimepicker/)
-
- ========================================================================
- BSD 3-Clause licenses
- ========================================================================
- The following components are provided under the BSD 3-Clause license. See project links for details.
- The text of each license is also included at 3rd-party-licenses/LICENSE-[project].txt.
-
- (BSD 3 License) d3 v5.16.0 (https://d3js.org)
- (BSD 3 License) d3-shape v2.1.0 (https://github.com/d3/d3-shape)
- (BSD 3 License) cgroupspy 0.2.1 (https://github.com/cloudsigma/cgroupspy)
-
- ========================================================================
- See 3rd-party-licenses/LICENSES-ui.txt for packages used in `/airflow/www`
@@ -29,7 +29,7 @@ from airflow import __version__ as airflow_version

  __all__ = ["__version__"]

- __version__ = "4.0.0"
+ __version__ = "4.2.0"

  if packaging.version.parse(packaging.version.parse(airflow_version).base_version) < packaging.version.parse(
  "2.9.0"
@@ -15,8 +15,7 @@
  # specific language governing permissions and limitations
  # under the License.

- # NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
- # OVERWRITTEN WHEN PREPARING PACKAGES.
+ # NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!
  #
  # IF YOU WANT TO MODIFY THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
  # `get_provider_info_TEMPLATE.py.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
@@ -28,8 +27,9 @@ def get_provider_info():
  "name": "dbt Cloud",
  "description": "`dbt Cloud <https://www.getdbt.com/product/dbt-cloud/>`__\n",
  "state": "ready",
- "source-date-epoch": 1734533324,
+ "source-date-epoch": 1739959421,
  "versions": [
+ "4.2.0",
  "4.0.0",
  "3.11.2",
  "3.11.1",
@@ -64,24 +64,11 @@ def get_provider_info():
  "1.0.2",
  "1.0.1",
  ],
- "dependencies": [
- "apache-airflow>=2.9.0",
- "apache-airflow-providers-http",
- "asgiref>=2.3.0",
- "aiohttp>=3.9.2",
- ],
- "additional-extras": [
- {
- "name": "openlineage",
- "description": "Install compatible OpenLineage dependencies",
- "dependencies": ["apache-airflow-providers-openlineage>=1.7.0"],
- }
- ],
  "integrations": [
  {
  "integration-name": "dbt Cloud",
  "external-doc-url": "https://docs.getdbt.com/docs/dbt-cloud/cloud-overview",
- "logo": "/integration-logos/dbt/dbt.png",
+ "logo": "/docs/integration-logos/dbt.png",
  "how-to-guide": ["/docs/apache-airflow-providers-dbt-cloud/operators.rst"],
  "tags": ["dbt"],
  }
@@ -105,4 +92,11 @@ def get_provider_info():
  }
  ],
  "extra-links": ["airflow.providers.dbt.cloud.operators.dbt.DbtCloudRunJobOperatorLink"],
+ "dependencies": [
+ "apache-airflow>=2.9.0",
+ "apache-airflow-providers-http",
+ "asgiref>=2.3.0",
+ "aiohttp>=3.9.2",
+ ],
+ "optional-dependencies": {"openlineage": ["apache-airflow-providers-openlineage>=1.7.0"]},
  }
@@ -24,7 +24,7 @@ from collections.abc import Sequence
  from enum import Enum
  from functools import cached_property, wraps
  from inspect import signature
- from typing import TYPE_CHECKING, Any, Callable, TypeVar, cast
+ from typing import TYPE_CHECKING, Any, Callable, TypedDict, TypeVar, cast

  import aiohttp
  from asgiref.sync import sync_to_async
@@ -33,7 +33,6 @@ from requests.sessions import Session

  from airflow.exceptions import AirflowException
  from airflow.providers.http.hooks.http import HttpHook
- from airflow.typing_compat import TypedDict

  if TYPE_CHECKING:
  from requests.models import PreparedRequest, Response
@@ -136,6 +135,10 @@ class DbtCloudJobRunException(AirflowException):
  """An exception that indicates a job run failed to complete."""


+ class DbtCloudResourceLookupError(AirflowException):
+ """Exception raised when a dbt Cloud resource cannot be uniquely identified."""
+
+
  T = TypeVar("T", bound=Any)

@@ -357,14 +360,23 @@ class DbtCloudHook(HttpHook):
  return self._run_and_get_response(endpoint=f"{account_id}/")

  @fallback_to_default_account
- def list_projects(self, account_id: int | None = None) -> list[Response]:
+ def list_projects(
+ self, account_id: int | None = None, name_contains: str | None = None
+ ) -> list[Response]:
  """
  Retrieve metadata for all projects tied to a specified dbt Cloud account.

  :param account_id: Optional. The ID of a dbt Cloud account.
+ :param name_contains: Optional. The case-insensitive substring of a dbt Cloud project name to filter by.
  :return: List of request responses.
  """
- return self._run_and_get_response(endpoint=f"{account_id}/projects/", paginate=True, api_version="v3")
+ payload = {"name__icontains": name_contains} if name_contains else None
+ return self._run_and_get_response(
+ endpoint=f"{account_id}/projects/",
+ payload=payload,
+ paginate=True,
+ api_version="v3",
+ )

  @fallback_to_default_account
  def get_project(self, project_id: int, account_id: int | None = None) -> Response:
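
A minimal usage sketch of the new project filter (the connection ID and project name below are illustrative, not taken from the package):

    from airflow.providers.dbt.cloud.hooks.dbt import DbtCloudHook

    hook = DbtCloudHook(dbt_cloud_conn_id="dbt_cloud_default")
    # Server-side, case-insensitive substring filter on the project name.
    responses = hook.list_projects(name_contains="analytics")
    projects = [project for response in responses for project in response.json()["data"]]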
@@ -377,27 +389,73 @@ class DbtCloudHook(HttpHook):
  """
  return self._run_and_get_response(endpoint=f"{account_id}/projects/{project_id}/", api_version="v3")

+ @fallback_to_default_account
+ def list_environments(
+ self, project_id: int, *, name_contains: str | None = None, account_id: int | None = None
+ ) -> list[Response]:
+ """
+ Retrieve metadata for all environments tied to a specified dbt Cloud project.
+
+ :param project_id: The ID of a dbt Cloud project.
+ :param name_contains: Optional. The case-insensitive substring of a dbt Cloud environment name to filter by.
+ :param account_id: Optional. The ID of a dbt Cloud account.
+ :return: List of request responses.
+ """
+ payload = {"name__icontains": name_contains} if name_contains else None
+ return self._run_and_get_response(
+ endpoint=f"{account_id}/projects/{project_id}/environments/",
+ payload=payload,
+ paginate=True,
+ api_version="v3",
+ )
+
+ @fallback_to_default_account
+ def get_environment(
+ self, project_id: int, environment_id: int, *, account_id: int | None = None
+ ) -> Response:
+ """
+ Retrieve metadata for a specific project's environment.
+
+ :param project_id: The ID of a dbt Cloud project.
+ :param environment_id: The ID of a dbt Cloud environment.
+ :param account_id: Optional. The ID of a dbt Cloud account.
+ :return: The request response.
+ """
+ return self._run_and_get_response(
+ endpoint=f"{account_id}/projects/{project_id}/environments/{environment_id}/", api_version="v3"
+ )
+
  @fallback_to_default_account
  def list_jobs(
  self,
  account_id: int | None = None,
  order_by: str | None = None,
  project_id: int | None = None,
+ environment_id: int | None = None,
+ name_contains: str | None = None,
  ) -> list[Response]:
  """
  Retrieve metadata for all jobs tied to a specified dbt Cloud account.

  If a ``project_id`` is supplied, only jobs pertaining to this project will be retrieved.
+ If an ``environment_id`` is supplied, only jobs pertaining to this environment will be retrieved.

  :param account_id: Optional. The ID of a dbt Cloud account.
  :param order_by: Optional. Field to order the result by. Use '-' to indicate reverse order.
  For example, to use reverse order by the run ID use ``order_by=-id``.
- :param project_id: The ID of a dbt Cloud project.
+ :param project_id: Optional. The ID of a dbt Cloud project.
+ :param environment_id: Optional. The ID of a dbt Cloud environment.
+ :param name_contains: Optional. The case-insensitive substring of a dbt Cloud job name to filter by.
  :return: List of request responses.
  """
+ payload = {"order_by": order_by, "project_id": project_id}
+ if environment_id:
+ payload["environment_id"] = environment_id
+ if name_contains:
+ payload["name__icontains"] = name_contains
  return self._run_and_get_response(
  endpoint=f"{account_id}/jobs/",
- payload={"order_by": order_by, "project_id": project_id},
+ payload=payload,
  paginate=True,
  )

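A brief sketch of the new environment and job filters added above (the IDs and names are placeholders, not values from the package):

    from airflow.providers.dbt.cloud.hooks.dbt import DbtCloudHook

    hook = DbtCloudHook(dbt_cloud_conn_id="dbt_cloud_default")
    # Environments of project 1234 whose name contains "prod" (case-insensitive).
    env_responses = hook.list_environments(project_id=1234, name_contains="prod")
    # Jobs scoped to one project and environment, filtered by a job-name substring.
    job_responses = hook.list_jobs(project_id=1234, environment_id=5678, name_contains="daily")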
@@ -412,6 +470,72 @@ class DbtCloudHook(HttpHook):
  """
  return self._run_and_get_response(endpoint=f"{account_id}/jobs/{job_id}")

+ @fallback_to_default_account
+ def get_job_by_name(
+ self, *, project_name: str, environment_name: str, job_name: str, account_id: int | None = None
+ ) -> dict:
+ """
+ Retrieve metadata for a specific job by combination of project, environment, and job name.
+
+ Raises DbtCloudResourceLookupError if the job is not found or cannot be uniquely identified by provided parameters.
+
+ :param project_name: The name of a dbt Cloud project.
+ :param environment_name: The name of a dbt Cloud environment.
+ :param job_name: The name of a dbt Cloud job.
+ :param account_id: Optional. The ID of a dbt Cloud account.
+ :return: The details of a job.
+ """
+ # get project_id using project_name
+ list_projects_responses = self.list_projects(name_contains=project_name, account_id=account_id)
+ # flatten & filter the list of responses to find the exact match
+ projects = [
+ project
+ for response in list_projects_responses
+ for project in response.json()["data"]
+ if project["name"] == project_name
+ ]
+ if len(projects) != 1:
+ raise DbtCloudResourceLookupError(f"Found {len(projects)} projects with name `{project_name}`.")
+ project_id = projects[0]["id"]
+
+ # get environment_id using project_id and environment_name
+ list_environments_responses = self.list_environments(
+ project_id=project_id, name_contains=environment_name, account_id=account_id
+ )
+ # flatten & filter the list of responses to find the exact match
+ environments = [
+ env
+ for response in list_environments_responses
+ for env in response.json()["data"]
+ if env["name"] == environment_name
+ ]
+ if len(environments) != 1:
+ raise DbtCloudResourceLookupError(
+ f"Found {len(environments)} environments with name `{environment_name}` in project `{project_name}`."
+ )
+ environment_id = environments[0]["id"]
+
+ # get job using project_id, environment_id and job_name
+ list_jobs_responses = self.list_jobs(
+ project_id=project_id,
+ environment_id=environment_id,
+ name_contains=job_name,
+ account_id=account_id,
+ )
+ # flatten & filter the list of responses to find the exact match
+ jobs = [
+ job
+ for response in list_jobs_responses
+ for job in response.json()["data"]
+ if job["name"] == job_name
+ ]
+ if len(jobs) != 1:
+ raise DbtCloudResourceLookupError(
+ f"Found {len(jobs)} jobs with name `{job_name}` in environment `{environment_name}` in project `{project_name}`."
+ )
+
+ return jobs[0]
+
  @fallback_to_default_account
  def trigger_job_run(
  self,
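
The lookup is strict: each name must resolve to exactly one project, environment, and job, otherwise DbtCloudResourceLookupError is raised. A hedged usage sketch (connection ID and names are placeholders):

    from airflow.providers.dbt.cloud.hooks.dbt import DbtCloudHook, DbtCloudResourceLookupError

    hook = DbtCloudHook(dbt_cloud_conn_id="dbt_cloud_default")
    try:
        # Resolves project -> environment -> job, requiring a single exact name match at each step.
        job = hook.get_job_by_name(
            project_name="analytics",
            environment_name="prod",
            job_name="daily_build",
        )
        job_id = job["id"]
    except DbtCloudResourceLookupError:
        # Zero or multiple matches were found at one of the three lookup steps.
        raise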
@@ -57,7 +57,10 @@ class DbtCloudRunJobOperator(BaseOperator):
  :ref:`howto/operator:DbtCloudRunJobOperator`

  :param dbt_cloud_conn_id: The connection ID for connecting to dbt Cloud.
- :param job_id: The ID of a dbt Cloud job.
+ :param job_id: The ID of a dbt Cloud job. Required if project_name, environment_name, and job_name are not provided.
+ :param project_name: Optional. The name of a dbt Cloud project. Used only if ``job_id`` is None.
+ :param environment_name: Optional. The name of a dbt Cloud environment. Used only if ``job_id`` is None.
+ :param job_name: Optional. The name of a dbt Cloud job. Used only if ``job_id`` is None.
  :param account_id: Optional. The ID of a dbt Cloud account.
  :param trigger_reason: Optional. Description of the reason to trigger the job.
  Defaults to "Triggered via Apache Airflow by task <task_id> in the <dag_id> DAG."
@@ -86,6 +89,9 @@ class DbtCloudRunJobOperator(BaseOperator):
  template_fields = (
  "dbt_cloud_conn_id",
  "job_id",
+ "project_name",
+ "environment_name",
+ "job_name",
  "account_id",
  "trigger_reason",
  "steps_override",
@@ -99,7 +105,10 @@ class DbtCloudRunJobOperator(BaseOperator):
  self,
  *,
  dbt_cloud_conn_id: str = DbtCloudHook.default_conn_name,
- job_id: int,
+ job_id: int | None = None,
+ project_name: str | None = None,
+ environment_name: str | None = None,
+ job_name: str | None = None,
  account_id: int | None = None,
  trigger_reason: str | None = None,
  steps_override: list[str] | None = None,
@@ -117,6 +126,9 @@ class DbtCloudRunJobOperator(BaseOperator):
  self.dbt_cloud_conn_id = dbt_cloud_conn_id
  self.account_id = account_id
  self.job_id = job_id
+ self.project_name = project_name
+ self.environment_name = environment_name
+ self.job_name = job_name
  self.trigger_reason = trigger_reason
  self.steps_override = steps_override
  self.schema_override = schema_override
@@ -135,6 +147,18 @@ class DbtCloudRunJobOperator(BaseOperator):
  f"Triggered via Apache Airflow by task {self.task_id!r} in the {self.dag.dag_id} DAG."
  )

+ if self.job_id is None:
+ if not all([self.project_name, self.environment_name, self.job_name]):
+ raise ValueError(
+ "Either job_id or project_name, environment_name, and job_name must be provided."
+ )
+ self.job_id = self.hook.get_job_by_name(
+ account_id=self.account_id,
+ project_name=self.project_name,
+ environment_name=self.environment_name,
+ job_name=self.job_name,
+ )["id"]
+
  non_terminal_runs = None
  if self.reuse_existing_run:
  non_terminal_runs = self.hook.get_job_runs(
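
In DAG code this allows triggering a run without hard-coding the numeric job ID; a sketch under the same assumptions (task IDs and names are illustrative):

    from airflow.providers.dbt.cloud.operators.dbt import DbtCloudRunJobOperator

    # As before: trigger by the numeric job ID.
    run_by_id = DbtCloudRunJobOperator(task_id="run_dbt_job_by_id", job_id=1234)

    # Alternatively, resolve the job at execution time from project, environment, and job names.
    run_by_name = DbtCloudRunJobOperator(
        task_id="run_dbt_job_by_name",
        project_name="analytics",
        environment_name="prod",
        job_name="daily_build",
    )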
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: apache-airflow-providers-dbt-cloud
- Version: 4.0.0rc2
+ Version: 4.2.0rc1
  Summary: Provider package apache-airflow-providers-dbt-cloud for Apache Airflow
  Keywords: airflow-provider,dbt.cloud,airflow,integration
  Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -20,14 +20,14 @@ Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Classifier: Topic :: System :: Monitoring
- Requires-Dist: aiohttp>=3.9.2
- Requires-Dist: apache-airflow-providers-http
  Requires-Dist: apache-airflow>=2.9.0rc0
+ Requires-Dist: apache-airflow-providers-http
  Requires-Dist: asgiref>=2.3.0
- Requires-Dist: apache-airflow-providers-openlineage>=1.7.0rc0 ; extra == "openlineage"
+ Requires-Dist: aiohttp>=3.9.2
+ Requires-Dist: apache-airflow-providers-openlineage>=1.7.0 ; extra == "openlineage"
  Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
- Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.0.0/changelog.html
- Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.0.0
+ Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.2.0/changelog.html
+ Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.2.0
  Project-URL: Slack Chat, https://s.apache.org/airflow-slack
  Project-URL: Source Code, https://github.com/apache/airflow
  Project-URL: Twitter, https://x.com/ApacheAirflow
@@ -35,23 +35,6 @@ Project-URL: YouTube, https://www.youtube.com/channel/UCSXwxpWZQ7XZ1WL3wqevChA/
  Provides-Extra: openlineage


- .. Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements. See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership. The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License. You may obtain a copy of the License at
-
- .. http://www.apache.org/licenses/LICENSE-2.0
-
- .. Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied. See the License for the
- specific language governing permissions and limitations
- under the License.
-
  .. Licensed to the Apache Software Foundation (ASF) under one
  or more contributor license agreements. See the NOTICE file
  distributed with this work for additional information
@@ -69,8 +52,7 @@ Provides-Extra: openlineage
  specific language governing permissions and limitations
  under the License.

- .. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
- OVERWRITTEN WHEN PREPARING PACKAGES.
+ .. NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE OVERWRITTEN!

  .. IF YOU WANT TO MODIFY TEMPLATE FOR THIS FILE, YOU SHOULD MODIFY THE TEMPLATE
  `PROVIDER_README_TEMPLATE.rst.jinja2` IN the `dev/breeze/src/airflow_breeze/templates` DIRECTORY
@@ -78,7 +60,7 @@ Provides-Extra: openlineage

  Package ``apache-airflow-providers-dbt-cloud``

- Release: ``4.0.0.rc2``
+ Release: ``4.2.0``


  `dbt Cloud <https://www.getdbt.com/product/dbt-cloud/>`__
@@ -91,7 +73,7 @@ This is a provider package for ``dbt.cloud`` provider. All classes for this prov
  are in ``airflow.providers.dbt.cloud`` python package.

  You can find package information and changelog for the provider
- in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.0.0/>`_.
+ in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.2.0/>`_.

  Installation
  ------------
@@ -135,4 +117,5 @@ Dependent package
  ============================================================================================================== ===============

  The changelog for the provider package can be found in the
- `changelog <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.0.0/changelog.html>`_.
+ `changelog <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/4.2.0/changelog.html>`_.
+
@@ -1,17 +1,17 @@
- airflow/providers/dbt/cloud/LICENSE,sha256=FFb4jd2AXnOOf7XLP04pQW6jbdhG49TxlGY6fFpCV1Y,13609
- airflow/providers/dbt/cloud/__init__.py,sha256=aFLIA3l_kzPW3lUS3Wtr65D9ZDCRtahs78rMwWtOgDM,1496
- airflow/providers/dbt/cloud/get_provider_info.py,sha256=WrJzEFLJMZk9LoIsrVTvM6rcGz1T2xJSy4wuCzTPF8Y,3777
+ airflow/providers/dbt/cloud/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850
+ airflow/providers/dbt/cloud/__init__.py,sha256=1mj3fNqTAVOQkU1OVmdIEsn7AYCPHAQa-UrC9xsbImw,1496
+ airflow/providers/dbt/cloud/get_provider_info.py,sha256=gh3m1yRgnC0Ry7h6jTO505jkJmui3Bu97Ron1RPW4-g,3604
  airflow/providers/dbt/cloud/hooks/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
- airflow/providers/dbt/cloud/hooks/dbt.py,sha256=Zkrpnis7OVPjFGly7dK8dvSFpEfbSFrjl8CdnOO6u8M,29192
+ airflow/providers/dbt/cloud/hooks/dbt.py,sha256=AxAEyeBewS0UQUtZMHLMbQmDAln3A_x3WdIrtzzK4Pk,34556
  airflow/providers/dbt/cloud/operators/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
- airflow/providers/dbt/cloud/operators/dbt.py,sha256=xilyondmB5yDWxh0gfaJKKWZYN52diFXsfrdBsn_1UE,16222
+ airflow/providers/dbt/cloud/operators/dbt.py,sha256=6Qq7_do0bqnrFRwwurG6lN0L9f3SU8iwNNrWqCjvyJY,17463
  airflow/providers/dbt/cloud/sensors/__init__.py,sha256=mlJxuZLkd5x-iq2SBwD3mvRQpt3YR7wjz_nceyF1IaI,787
  airflow/providers/dbt/cloud/sensors/dbt.py,sha256=OGf-VNKcCNCrImrdE8PudmtDak_MKJfPhmaf7_ccOLg,5157
  airflow/providers/dbt/cloud/triggers/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
  airflow/providers/dbt/cloud/triggers/dbt.py,sha256=Oabdc7FcNhCQxkjDC5SqAiYEw4hSZ9mQGZgSt36a1E0,4707
  airflow/providers/dbt/cloud/utils/__init__.py,sha256=9hdXHABrVpkbpjZgUft39kOFL2xSGeG4GEua0Hmelus,785
  airflow/providers/dbt/cloud/utils/openlineage.py,sha256=6hGaRpfPE0HJSyEQNo4xt9Pfvdi9GI-mysIb4dBACpk,6611
- apache_airflow_providers_dbt_cloud-4.0.0rc2.dist-info/entry_points.txt,sha256=c18L1WEEK18WQeEGrm9kMVqutiYJHiWGH5jU1JqnToE,105
- apache_airflow_providers_dbt_cloud-4.0.0rc2.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
- apache_airflow_providers_dbt_cloud-4.0.0rc2.dist-info/METADATA,sha256=48DLKx4n8niLM_F8R5yV5AbFMvSfl_xjPergGBYeJ7I,6261
- apache_airflow_providers_dbt_cloud-4.0.0rc2.dist-info/RECORD,,
+ apache_airflow_providers_dbt_cloud-4.2.0rc1.dist-info/entry_points.txt,sha256=c18L1WEEK18WQeEGrm9kMVqutiYJHiWGH5jU1JqnToE,105
+ apache_airflow_providers_dbt_cloud-4.2.0rc1.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
+ apache_airflow_providers_dbt_cloud-4.2.0rc1.dist-info/METADATA,sha256=t3tXNESsbjxFiPNbv2fk_robSKMQbH_W2NZRVQMsUOc,5429
+ apache_airflow_providers_dbt_cloud-4.2.0rc1.dist-info/RECORD,,