apache-airflow-providers-databricks 6.3.0rc2.tar.gz → 6.3.0rc3.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (21)
  1. {apache_airflow_providers_databricks-6.3.0rc2 → apache_airflow_providers_databricks-6.3.0rc3}/PKG-INFO +2 -2
  2. {apache_airflow_providers_databricks-6.3.0rc2 → apache_airflow_providers_databricks-6.3.0rc3}/README.rst +1 -1
  3. {apache_airflow_providers_databricks-6.3.0rc2 → apache_airflow_providers_databricks-6.3.0rc3}/airflow/providers/databricks/hooks/databricks.py +3 -3
  4. {apache_airflow_providers_databricks-6.3.0rc2 → apache_airflow_providers_databricks-6.3.0rc3}/airflow/providers/databricks/operators/databricks.py +2 -2
  5. {apache_airflow_providers_databricks-6.3.0rc2 → apache_airflow_providers_databricks-6.3.0rc3}/pyproject.toml +1 -1
  6. {apache_airflow_providers_databricks-6.3.0rc2 → apache_airflow_providers_databricks-6.3.0rc3}/airflow/providers/databricks/LICENSE +0 -0
  7. {apache_airflow_providers_databricks-6.3.0rc2 → apache_airflow_providers_databricks-6.3.0rc3}/airflow/providers/databricks/__init__.py +0 -0
  8. {apache_airflow_providers_databricks-6.3.0rc2 → apache_airflow_providers_databricks-6.3.0rc3}/airflow/providers/databricks/get_provider_info.py +0 -0
  9. {apache_airflow_providers_databricks-6.3.0rc2 → apache_airflow_providers_databricks-6.3.0rc3}/airflow/providers/databricks/hooks/__init__.py +0 -0
  10. {apache_airflow_providers_databricks-6.3.0rc2 → apache_airflow_providers_databricks-6.3.0rc3}/airflow/providers/databricks/hooks/databricks_base.py +0 -0
  11. {apache_airflow_providers_databricks-6.3.0rc2 → apache_airflow_providers_databricks-6.3.0rc3}/airflow/providers/databricks/hooks/databricks_sql.py +0 -0
  12. {apache_airflow_providers_databricks-6.3.0rc2 → apache_airflow_providers_databricks-6.3.0rc3}/airflow/providers/databricks/operators/__init__.py +0 -0
  13. {apache_airflow_providers_databricks-6.3.0rc2 → apache_airflow_providers_databricks-6.3.0rc3}/airflow/providers/databricks/operators/databricks_repos.py +0 -0
  14. {apache_airflow_providers_databricks-6.3.0rc2 → apache_airflow_providers_databricks-6.3.0rc3}/airflow/providers/databricks/operators/databricks_sql.py +0 -0
  15. {apache_airflow_providers_databricks-6.3.0rc2 → apache_airflow_providers_databricks-6.3.0rc3}/airflow/providers/databricks/sensors/__init__.py +0 -0
  16. {apache_airflow_providers_databricks-6.3.0rc2 → apache_airflow_providers_databricks-6.3.0rc3}/airflow/providers/databricks/sensors/databricks_partition.py +0 -0
  17. {apache_airflow_providers_databricks-6.3.0rc2 → apache_airflow_providers_databricks-6.3.0rc3}/airflow/providers/databricks/sensors/databricks_sql.py +0 -0
  18. {apache_airflow_providers_databricks-6.3.0rc2 → apache_airflow_providers_databricks-6.3.0rc3}/airflow/providers/databricks/triggers/__init__.py +0 -0
  19. {apache_airflow_providers_databricks-6.3.0rc2 → apache_airflow_providers_databricks-6.3.0rc3}/airflow/providers/databricks/triggers/databricks.py +0 -0
  20. {apache_airflow_providers_databricks-6.3.0rc2 → apache_airflow_providers_databricks-6.3.0rc3}/airflow/providers/databricks/utils/__init__.py +0 -0
  21. {apache_airflow_providers_databricks-6.3.0rc2 → apache_airflow_providers_databricks-6.3.0rc3}/airflow/providers/databricks/utils/databricks.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: apache-airflow-providers-databricks
-Version: 6.3.0rc2
+Version: 6.3.0rc3
 Summary: Provider package apache-airflow-providers-databricks for Apache Airflow
 Keywords: airflow-provider,databricks,airflow,integration
 Author-email: Apache Software Foundation <dev@airflow.apache.org>
@@ -82,7 +82,7 @@ Provides-Extra: sdk
 
 Package ``apache-airflow-providers-databricks``
 
-Release: ``6.3.0.rc2``
+Release: ``6.3.0.rc3``
 
 
 `Databricks <https://databricks.com/>`__
README.rst
@@ -42,7 +42,7 @@
 
 Package ``apache-airflow-providers-databricks``
 
-Release: ``6.3.0.rc2``
+Release: ``6.3.0.rc3``
 
 
 `Databricks <https://databricks.com/>`__
airflow/providers/databricks/hooks/databricks.py
@@ -51,7 +51,6 @@ DELETE_RUN_ENDPOINT = ("POST", "api/2.1/jobs/runs/delete")
 REPAIR_RUN_ENDPOINT = ("POST", "api/2.1/jobs/runs/repair")
 OUTPUT_RUNS_JOB_ENDPOINT = ("GET", "api/2.1/jobs/runs/get-output")
 CANCEL_ALL_RUNS_ENDPOINT = ("POST", "api/2.1/jobs/runs/cancel-all")
-UPDATE_PERMISSION_ENDPOINT = ("PATCH", "api/2.0/permissions/jobs")
 
 INSTALL_LIBS_ENDPOINT = ("POST", "api/2.0/libraries/install")
 UNINSTALL_LIBS_ENDPOINT = ("POST", "api/2.0/libraries/uninstall")
@@ -656,14 +655,15 @@ class DatabricksHook(BaseDatabricksHook):
 
         return None
 
-    def update_job_permission(self, json: dict[str, Any]) -> dict:
+    def update_job_permission(self, job_id: int, json: dict[str, Any]) -> dict:
         """
         Update databricks job permission.
 
+        :param job_id: job id
         :param json: payload
         :return: json containing permission specification
         """
-        return self._do_api_call(UPDATE_PERMISSION_ENDPOINT, json)
+        return self._do_api_call(("PATCH", f"api/2.0/permissions/jobs/{job_id}"), json)
 
     def test_connection(self) -> tuple[bool, str]:
         """Test the Databricks connectivity from UI."""
airflow/providers/databricks/operators/databricks.py
@@ -318,7 +318,7 @@ class DatabricksCreateJobsOperator(BaseOperator):
         self._hook.reset_job(str(job_id), self.json)
         if (access_control_list := self.json.get("access_control_list")) is not None:
             acl_json = {"access_control_list": access_control_list}
-            self._hook.update_job_permission(normalise_json_content(acl_json))
+            self._hook.update_job_permission(job_id, normalise_json_content(acl_json))
 
         return job_id
 
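DatabricksCreateJobsOperator reads the ACL from its json argument, so a task that exercises the updated call might look roughly like the sketch below; the job specification and principal are illustrative, not part of this diff:

    from airflow.providers.databricks.operators.databricks import DatabricksCreateJobsOperator

    # Illustrative job spec; only "access_control_list" is relevant to this change.
    create_job = DatabricksCreateJobsOperator(
        task_id="create_databricks_job",
        json={
            "name": "example-job",
            "tasks": [
                {
                    "task_key": "example-task",
                    "notebook_task": {"notebook_path": "/Shared/example"},
                    "existing_cluster_id": "1234-567890-abcde123",
                }
            ],
            "access_control_list": [
                {"user_name": "someone@example.com", "permission_level": "CAN_MANAGE"},
            ],
        },
    )
    # On execute, the operator now forwards the resolved job_id to
    # update_job_permission, so the PATCH targets that specific job.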
airflow/providers/databricks/operators/databricks.py
@@ -858,7 +858,7 @@ class DatabricksRunNowOperator(BaseOperator):
                repair_json = {"run_id": self.run_id, "rerun_all_failed_tasks": True}
                if latest_repair_id is not None:
                    repair_json["latest_repair_id"] = latest_repair_id
-               self.json["latest_srepair_id"] = self._hook.repair_run(repair_json)
+               self.json["latest_repair_id"] = self._hook.repair_run(repair_json)
                _handle_deferrable_databricks_operator_execution(self, self._hook, self.log, context)
 
     def on_kill(self) -> None:
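The corrected key only matters on the deferrable repair path, where the value returned by repair_run is now stored under latest_repair_id as intended. A rough illustration of a task that would reach this path, assuming the deferrable and repair_run flags available in this provider line; all values are illustrative:

    from airflow.providers.databricks.operators.databricks import DatabricksRunNowOperator

    # Illustrative task: a deferrable run that asks Airflow to repair failed tasks.
    run_job = DatabricksRunNowOperator(
        task_id="run_databricks_job",
        job_id=123,
        deferrable=True,
        repair_run=True,
    )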
pyproject.toml
@@ -28,7 +28,7 @@ build-backend = "flit_core.buildapi"
 
 [project]
 name = "apache-airflow-providers-databricks"
-version = "6.3.0.rc2"
+version = "6.3.0.rc3"
 description = "Provider package apache-airflow-providers-databricks for Apache Airflow"
 readme = "README.rst"
 authors = [