apache-airflow-providers-dbt-cloud 3.6.1rc1__tar.gz → 3.7.0rc1__tar.gz

This diff compares the contents of two package versions publicly released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
Files changed (16)
  1. {apache_airflow_providers_dbt_cloud-3.6.1rc1 → apache_airflow_providers_dbt_cloud-3.7.0rc1}/PKG-INFO +6 -6
  2. {apache_airflow_providers_dbt_cloud-3.6.1rc1 → apache_airflow_providers_dbt_cloud-3.7.0rc1}/README.rst +3 -3
  3. {apache_airflow_providers_dbt_cloud-3.6.1rc1 → apache_airflow_providers_dbt_cloud-3.7.0rc1}/airflow/providers/dbt/cloud/__init__.py +1 -1
  4. {apache_airflow_providers_dbt_cloud-3.6.1rc1 → apache_airflow_providers_dbt_cloud-3.7.0rc1}/airflow/providers/dbt/cloud/get_provider_info.py +2 -1
  5. {apache_airflow_providers_dbt_cloud-3.6.1rc1 → apache_airflow_providers_dbt_cloud-3.7.0rc1}/airflow/providers/dbt/cloud/hooks/dbt.py +15 -0
  6. {apache_airflow_providers_dbt_cloud-3.6.1rc1 → apache_airflow_providers_dbt_cloud-3.7.0rc1}/airflow/providers/dbt/cloud/operators/dbt.py +30 -10
  7. {apache_airflow_providers_dbt_cloud-3.6.1rc1 → apache_airflow_providers_dbt_cloud-3.7.0rc1}/pyproject.toml +3 -3
  8. {apache_airflow_providers_dbt_cloud-3.6.1rc1 → apache_airflow_providers_dbt_cloud-3.7.0rc1}/airflow/providers/dbt/cloud/LICENSE +0 -0
  9. {apache_airflow_providers_dbt_cloud-3.6.1rc1 → apache_airflow_providers_dbt_cloud-3.7.0rc1}/airflow/providers/dbt/cloud/hooks/__init__.py +0 -0
  10. {apache_airflow_providers_dbt_cloud-3.6.1rc1 → apache_airflow_providers_dbt_cloud-3.7.0rc1}/airflow/providers/dbt/cloud/operators/__init__.py +0 -0
  11. {apache_airflow_providers_dbt_cloud-3.6.1rc1 → apache_airflow_providers_dbt_cloud-3.7.0rc1}/airflow/providers/dbt/cloud/sensors/__init__.py +0 -0
  12. {apache_airflow_providers_dbt_cloud-3.6.1rc1 → apache_airflow_providers_dbt_cloud-3.7.0rc1}/airflow/providers/dbt/cloud/sensors/dbt.py +0 -0
  13. {apache_airflow_providers_dbt_cloud-3.6.1rc1 → apache_airflow_providers_dbt_cloud-3.7.0rc1}/airflow/providers/dbt/cloud/triggers/__init__.py +0 -0
  14. {apache_airflow_providers_dbt_cloud-3.6.1rc1 → apache_airflow_providers_dbt_cloud-3.7.0rc1}/airflow/providers/dbt/cloud/triggers/dbt.py +0 -0
  15. {apache_airflow_providers_dbt_cloud-3.6.1rc1 → apache_airflow_providers_dbt_cloud-3.7.0rc1}/airflow/providers/dbt/cloud/utils/__init__.py +0 -0
  16. {apache_airflow_providers_dbt_cloud-3.6.1rc1 → apache_airflow_providers_dbt_cloud-3.7.0rc1}/airflow/providers/dbt/cloud/utils/openlineage.py +0 -0
PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: apache-airflow-providers-dbt-cloud
-Version: 3.6.1rc1
+Version: 3.7.0rc1
 Summary: Provider package apache-airflow-providers-dbt-cloud for Apache Airflow
 Keywords: airflow-provider,dbt.cloud,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -27,8 +27,8 @@ Requires-Dist: asgiref
 Requires-Dist: apache-airflow-providers-http ; extra == "http"
 Requires-Dist: apache-airflow-providers-openlineage ; extra == "openlineage"
 Project-URL: Bug Tracker, https://github.com/apache/airflow/issues
-Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.6.1/changelog.html
-Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.6.1
+Project-URL: Changelog, https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.7.0/changelog.html
+Project-URL: Documentation, https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.7.0
 Project-URL: Slack Chat, https://s.apache.org/airflow-slack
 Project-URL: Source Code, https://github.com/apache/airflow
 Project-URL: Twitter, https://twitter.com/ApacheAirflow
@@ -80,7 +80,7 @@ Provides-Extra: openlineage

 Package ``apache-airflow-providers-dbt-cloud``

-Release: ``3.6.1.rc1``
+Release: ``3.7.0.rc1``


 `dbt Cloud <https://www.getdbt.com/product/dbt-cloud/>`__
@@ -93,7 +93,7 @@ This is a provider package for ``dbt.cloud`` provider. All classes for this prov
 are in ``airflow.providers.dbt.cloud`` python package.

 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.6.1/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.7.0/>`_.

 Installation
 ------------
@@ -137,4 +137,4 @@ Dependent package
 ============================================================================================================== ===============

 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.6.1/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.7.0/changelog.html>`_.
README.rst

@@ -42,7 +42,7 @@

 Package ``apache-airflow-providers-dbt-cloud``

-Release: ``3.6.1.rc1``
+Release: ``3.7.0.rc1``


 `dbt Cloud <https://www.getdbt.com/product/dbt-cloud/>`__
@@ -55,7 +55,7 @@ This is a provider package for ``dbt.cloud`` provider. All classes for this prov
 are in ``airflow.providers.dbt.cloud`` python package.

 You can find package information and changelog for the provider
-in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.6.1/>`_.
+in the `documentation <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.7.0/>`_.

 Installation
 ------------
@@ -99,4 +99,4 @@ Dependent package
 ============================================================================================================== ===============

 The changelog for the provider package can be found in the
-`changelog <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.6.1/changelog.html>`_.
+`changelog <https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.7.0/changelog.html>`_.
airflow/providers/dbt/cloud/__init__.py

@@ -27,7 +27,7 @@ import packaging.version

 __all__ = ["__version__"]

-__version__ = "3.6.1"
+__version__ = "3.7.0"

 try:
     from airflow import __version__ as airflow_version
airflow/providers/dbt/cloud/get_provider_info.py

@@ -28,8 +28,9 @@ def get_provider_info():
         "name": "dbt Cloud",
         "description": "`dbt Cloud <https://www.getdbt.com/product/dbt-cloud/>`__\n",
         "state": "ready",
-        "source-date-epoch": 1707636353,
+        "source-date-epoch": 1709555328,
         "versions": [
+            "3.7.0",
             "3.6.1",
             "3.6.0",
             "3.5.1",
airflow/providers/dbt/cloud/hooks/dbt.py

@@ -109,6 +109,7 @@ class DbtCloudJobRunStatus(Enum):
     SUCCESS = 10
     ERROR = 20
     CANCELLED = 30
+    NON_TERMINAL_STATUSES = (QUEUED, STARTING, RUNNING)
     TERMINAL_STATUSES = (SUCCESS, ERROR, CANCELLED)

     @classmethod
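
This hunk adds a NON_TERMINAL_STATUSES member alongside the existing TERMINAL_STATUSES. A minimal sketch of how a caller might use it, assuming the upstream dbt Cloud status codes 1, 2 and 3 for queued, starting and running (those values are not shown in this hunk):

    from airflow.providers.dbt.cloud.hooks.dbt import DbtCloudJobRunStatus

    # A raw status code as returned by the dbt Cloud API (3 = "running" is assumed here).
    status_from_api = 3
    # NON_TERMINAL_STATUSES.value is the tuple of queued/starting/running codes.
    print(status_from_api in DbtCloudJobRunStatus.NON_TERMINAL_STATUSES.value)  # True
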
@@ -460,6 +461,20 @@ class DbtCloudHook(HttpHook):
             paginate=True,
         )

+    @fallback_to_default_account
+    def get_job_runs(self, account_id: int | None = None, payload: dict[str, Any] | None = None) -> Response:
+        """
+        Retrieve metadata for dbt Cloud job runs in an account, optionally filtered by query parameters.
+
+        :param account_id: Optional. The ID of a dbt Cloud account.
+        :param payload: Optional. Query parameters.
+        :return: The request response.
+        """
+        return self._run_and_get_response(
+            endpoint=f"{account_id}/runs/",
+            payload=payload,
+        )
+
     @fallback_to_default_account
     def get_job_run(
         self, run_id: int, account_id: int | None = None, include_related: list[str] | None = None
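
The new get_job_runs method is what the operator change below uses to look up existing runs. A hedged sketch of calling it directly, assuming a connection named "dbt_cloud_default" whose default account ID applies and an illustrative job ID of 12345; the payload mirrors the filter the operator builds, except that the raw status codes are passed via .value:

    from airflow.providers.dbt.cloud.hooks.dbt import DbtCloudHook, DbtCloudJobRunStatus

    hook = DbtCloudHook(dbt_cloud_conn_id="dbt_cloud_default")

    # List the newest runs of job 12345 that are still queued, starting, or running.
    response = hook.get_job_runs(
        payload={
            "job_definition_id": 12345,  # hypothetical dbt Cloud job ID
            "status": DbtCloudJobRunStatus.NON_TERMINAL_STATUSES.value,
            "order_by": "-created_at",
        },
    )
    runs = response.json()["data"]
    if runs:
        print(f"Latest non-terminal run: {runs[0]['id']} ({runs[0]['href']})")
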
airflow/providers/dbt/cloud/operators/dbt.py

@@ -73,6 +73,8 @@ class DbtCloudRunJobOperator(BaseOperator):
         Used only if ``wait_for_termination`` is True. Defaults to 60 seconds.
     :param additional_run_config: Optional. Any additional parameters that should be included in the API
         request when triggering the job.
+    :param reuse_existing_run: Flag to determine whether to reuse an existing non-terminal job run. If set
+        to True and a non-terminal job run is found, the latest such run is used without triggering a new one.
     :param deferrable: Run operator in the deferrable mode
     :return: The ID of the triggered dbt Cloud job run.
     """
@@ -102,6 +104,7 @@
         timeout: int = 60 * 60 * 24 * 7,
         check_interval: int = 60,
         additional_run_config: dict[str, Any] | None = None,
+        reuse_existing_run: bool = False,
         deferrable: bool = conf.getboolean("operators", "default_deferrable", fallback=False),
         **kwargs,
     ) -> None:
@@ -117,6 +120,7 @@
         self.check_interval = check_interval
         self.additional_run_config = additional_run_config or {}
         self.run_id: int | None = None
+        self.reuse_existing_run = reuse_existing_run
         self.deferrable = deferrable

     def execute(self, context: Context):
@@ -125,16 +129,32 @@
                 f"Triggered via Apache Airflow by task {self.task_id!r} in the {self.dag.dag_id} DAG."
             )

-        trigger_job_response = self.hook.trigger_job_run(
-            account_id=self.account_id,
-            job_id=self.job_id,
-            cause=self.trigger_reason,
-            steps_override=self.steps_override,
-            schema_override=self.schema_override,
-            additional_run_config=self.additional_run_config,
-        )
-        self.run_id = trigger_job_response.json()["data"]["id"]
-        job_run_url = trigger_job_response.json()["data"]["href"]
+        non_terminal_runs = None
+        if self.reuse_existing_run:
+            non_terminal_runs = self.hook.get_job_runs(
+                account_id=self.account_id,
+                payload={
+                    "job_definition_id": self.job_id,
+                    "status": DbtCloudJobRunStatus.NON_TERMINAL_STATUSES,
+                    "order_by": "-created_at",
+                },
+            ).json()["data"]
+            if non_terminal_runs:
+                self.run_id = non_terminal_runs[0]["id"]
+                job_run_url = non_terminal_runs[0]["href"]
+
+        if not self.reuse_existing_run or not non_terminal_runs:
+            trigger_job_response = self.hook.trigger_job_run(
+                account_id=self.account_id,
+                job_id=self.job_id,
+                cause=self.trigger_reason,
+                steps_override=self.steps_override,
+                schema_override=self.schema_override,
+                additional_run_config=self.additional_run_config,
+            )
+            self.run_id = trigger_job_response.json()["data"]["id"]
+            job_run_url = trigger_job_response.json()["data"]["href"]
+
         # Push the ``job_run_url`` value to XCom regardless of what happens during execution so that the job
         # run can be monitored via the operator link.
         context["ti"].xcom_push(key="job_run_url", value=job_run_url)
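
Taken together, the operator changes mean that with reuse_existing_run=True the task first looks for a queued/starting/running run of the job and only triggers a new one when none is found. A hedged usage sketch, assuming a hypothetical job ID of 12345 and the provider's default connection (not taken from the provider's example DAGs):

    from datetime import datetime

    from airflow import DAG
    from airflow.providers.dbt.cloud.operators.dbt import DbtCloudRunJobOperator

    with DAG(dag_id="example_dbt_cloud_reuse_run", start_date=datetime(2024, 1, 1), schedule=None):
        trigger_or_reuse = DbtCloudRunJobOperator(
            task_id="trigger_or_reuse_dbt_job",
            job_id=12345,               # hypothetical dbt Cloud job ID
            reuse_existing_run=True,    # attach to the latest non-terminal run instead of always triggering
            wait_for_termination=True,  # poll until the (new or reused) run reaches a terminal state
            check_interval=60,
        )
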
pyproject.toml

@@ -28,7 +28,7 @@ build-backend = "flit_core.buildapi"

 [project]
 name = "apache-airflow-providers-dbt-cloud"
-version = "3.6.1.rc1"
+version = "3.7.0.rc1"
 description = "Provider package apache-airflow-providers-dbt-cloud for Apache Airflow"
 readme = "README.rst"
 authors = [
@@ -62,8 +62,8 @@ dependencies = [
 ]

 [project.urls]
-"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.6.1"
-"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.6.1/changelog.html"
+"Documentation" = "https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.7.0"
+"Changelog" = "https://airflow.apache.org/docs/apache-airflow-providers-dbt-cloud/3.7.0/changelog.html"
 "Bug Tracker" = "https://github.com/apache/airflow/issues"
 "Source Code" = "https://github.com/apache/airflow"
 "Slack Chat" = "https://s.apache.org/airflow-slack"