airflow-unicore-integration 0.1.12__py3-none-any.whl → 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,8 +1,14 @@
1
1
  from __future__ import annotations
2
2
 
3
- from airflow.hooks.base import BaseHook
3
+ import logging
4
+ from typing import Any
5
+
6
+ from airflow.providers.common.compat.sdk import BaseHook
4
7
  from pyunicore import client
5
8
  from pyunicore import credentials
9
+ from wtforms import StringField
10
+
11
+ logger = logging.getLogger(__name__)
6
12
 
7
13
 
8
14
  class UnicoreHook(BaseHook):
@@ -23,25 +29,59 @@ class UnicoreHook(BaseHook):
23
29
  super().__init__()
24
30
  self.uc_conn_id = uc_conn_id
25
31
 
32
+ @classmethod
33
+ def get_connection_form_fields(cls):
34
+ return {"auth_token": StringField("Auth Token")}
35
+
36
+ @classmethod
37
+ def get_ui_field_behaviour(cls) -> dict[str, Any]:
38
+ """Return custom UI field behaviour for UNICORE connection."""
39
+ return {
40
+ "hidden_fields": ["schema", "port", "extra"],
41
+ "relabeling": {
42
+ "login": "Username",
43
+ },
44
+ "placeholder": {"auth_token": "UNICORE auth token"},
45
+ }
46
+
26
47
  def get_conn(
27
48
  self,
28
49
  overwrite_base_url: str | None = None,
29
50
  overwrite_credential: credentials.Credential | None = None,
30
51
  ) -> client.Client:
31
52
  """Return a Unicore Client. base_url and credentials may be overwritten."""
32
- self.log.debug(
53
+ logger.debug(
33
54
  f"Gettig connection with id '{self.uc_conn_id}' from secrets backend. Will be modified with user input for UNICORE."
34
55
  )
35
56
  params = self.get_connection(self.uc_conn_id)
36
57
  base_url = params.host
37
58
  credential = credentials.UsernamePassword(params.login, params.password)
59
+ auth_token = params.extra_dejson.get("auth_token", None)
60
+ if auth_token is not None:
61
+ credential = credentials.create_credential(token=auth_token)
38
62
  if overwrite_base_url is not None:
39
63
  base_url = overwrite_base_url
40
64
  if overwrite_credential is not None:
41
65
  credential = overwrite_credential
66
+ if not base_url:
67
+ raise TypeError()
42
68
  conn = client.Client(credential, base_url)
43
69
  return conn
44
70
 
71
+ def get_credential(self) -> credentials.Credential:
72
+ """Return the credential part of the connection as a Credential object."""
73
+ params = self.get_connection(self.uc_conn_id)
74
+ credential = credentials.UsernamePassword(params.login, params.password)
75
+ auth_token = params.extra_dejson.get("auth_token", None)
76
+ if auth_token is not None:
77
+ credential = credentials.create_credential(token=auth_token)
78
+ return credential
79
+
80
+ def get_base_url(self) -> str:
81
+ """Return the base url of the connection."""
82
+ params = self.get_connection(self.uc_conn_id)
83
+ return params.host
84
+
45
85
  def test_connection(self) -> tuple[bool, str]:
46
86
  """Test the connection by sending an access_info request"""
47
87
  conn = self.get_conn()
@@ -0,0 +1,22 @@
1
+ from typing import Dict
2
+ from typing import List
3
+
4
+ from airflow_unicore_integration.operators.unicore_operators import (
5
+ UnicoreGenericOperator,
6
+ )
7
+
8
+
9
+ class UnicoreContainerOperator(UnicoreGenericOperator):
10
+ def __init__(
11
+ self, name: str, docker_image_url: str, command: str, options: str | None = None, **kwargs
12
+ ):
13
+ params: Dict[str, str | List[str]] = {"COMMAND": command, "IMAGE_URL": docker_image_url}
14
+ if options is not None:
15
+ params["OPTIONS"] = options
16
+ super().__init__(
17
+ name=name,
18
+ application_name="CONTAINER",
19
+ application_version="1.0",
20
+ parameters=params,
21
+ **kwargs,
22
+ )
@@ -6,7 +6,7 @@ from typing import Sequence
6
6
 
7
7
  import pyunicore.client as uc_client
8
8
  import pyunicore.credentials as uc_credentials
9
- from airflow.models.baseoperator import BaseOperator
9
+ from airflow.sdk.bases.operator import BaseOperator
10
10
  from airflow.utils.context import Context
11
11
 
12
12
  from airflow_unicore_integration.hooks import unicore_hooks
@@ -62,6 +62,7 @@ class UnicoreGenericOperator(BaseOperator):
62
62
  credential_username: str | None = None,
63
63
  credential_password: str | None = None,
64
64
  credential_token: str | None = None,
65
+ conn_id: str | None = None,
65
66
  **kwargs,
66
67
  ):
67
68
  """
@@ -114,6 +115,7 @@ class UnicoreGenericOperator(BaseOperator):
114
115
  self.credential_username = credential_username
115
116
  self.credential_password = credential_password
116
117
  self.credential_token = credential_token
118
+ self.conn_id = conn_id
117
119
 
118
120
  self.validate_job_description()
119
121
  logger.debug("created Unicore Job Task")
@@ -262,7 +264,7 @@ class UnicoreGenericOperator(BaseOperator):
262
264
 
263
265
  def execute_async(self, context: Context) -> Any:
264
266
  """Submits the job and returns the job object without waiting for it to finish."""
265
- client = self.get_uc_client()
267
+ client = self.get_uc_client(self.conn_id)
266
268
  job = client.new_job(job_description=self.get_job_description(), inputs=[])
267
269
  return job
268
270
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: airflow-unicore-integration
3
- Version: 0.1.12
3
+ Version: 0.2.0
4
4
  Summary: Running Unicore Jobs from airflow DAGs.
5
5
  Author-email: Christian Böttcher <c.boettcher@fz-juelich.de>
6
6
  License-Expression: BSD-3-Clause
@@ -24,7 +24,7 @@ Unicore Airflow Integration
24
24
 
25
25
  |Generic badge|
26
26
 
27
- .. |Generic badge| image:: https://github.com/UNICORE-EU/airflow-unicore-integration/actions/workflows/publish-to-pypi.yml/badge.svg
27
+ .. |Generic badge| image:: https://github.com/UNICORE-EU/airflow-unicore-integration/actions/workflows/publish-to-pypi.yml/badge.svg
28
28
  :target: https://github.com/UNICORE-EU/airflow-unicore-integration/actions/workflows/publish-to-pypi.yml
29
29
 
30
30
  This project integrates `UNICORE <https://github.com/UNICORE-EU>`_ and `Apache Airflow <https://airflow.apache.org/>`_.
@@ -34,6 +34,31 @@ Airflow is a platform to programmatically author, schedule and monitor workflows
34
34
  In the current state, this project provides a set of airflow `operators <https://airflow.apache.org/docs/apache-airflow/stable/core-concepts/operators.html>`_, which can be used as part of airflow workflows to submit jobs to Unicore.
35
35
  The UnicoreExecutor only offers experimental support for airflow 3 so far. Further support is currently being worked on.
36
36
 
37
+ -------------------------
38
+ Using the UnicoreExecutor
39
+ -------------------------
40
+
41
+ To use the UnicoreExecutor, this library needs to be installed in your airflow environment and then some configuration work needs to be done.
42
+
43
+ How to configure these settings is up to your deployment, as it uses the standard airflow configuration mechanism.
44
+ In case of a helm deployment via the official helm chart, you will need to use environment variables, as all unicore related options are not present in the chart and will cause schema-validation to fail.
45
+
46
+ All options fall under the [unicore.executor] section in airflow.cfg, or have the ``AIRFLOW__UNICORE_EXECUTOR__`` prefix as an environment variable.
47
+
48
+ ========================= ============================================ ===========================================================================================
49
+ Option name default description
50
+ ========================= ============================================ ===========================================================================================
51
+ EXECUTION_API_SERVER_URL <The default from the airflow config> The url to reach the airflow API server from the execution environment (e.g. compute nodes)
52
+ AUTH_TOKEN mandatory The unicore auth token to use for job submission
53
+ DEFAULT_URL http://localhost:8080/DEMO-SITE/rest/core The default unicore site to submit jobs to
54
+ DEFAULT_ENV mandatory The default activation script for a functional airflow environment on the execution machine
55
+ TMP_DIR /tmp A temporary directory to store data such as GitDagBundles
56
+ ========================= ============================================ ===========================================================================================
57
+
58
+ The default env is loaded via ``. default_env.sh``, and must enable an environment, where python is available in a suitable version, and the ``apache-airflow-task-sdk`` and ``apache-airflow-providers-git`` packages are available. All other dependencies depend on the dags to be run, but must already be included in the environment.
59
+
60
+ A simple solution for this may be the "activate" script for a python venv. If the target systems requires additional commands to enable python (e.g. ``module load``), these may be added to the top of the activate script.
61
+
37
62
  ---------------------------
38
63
  Using the Unicore Operators
39
64
  ---------------------------
@@ -3,15 +3,16 @@ airflow_unicore_integration/executors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCe
3
3
  airflow_unicore_integration/executors/run_task_via_supervisor.py,sha256=3ErgPf-Oy3B4Di5yNXhhPkaojIJykvCxMZ9MlKSYPI8,2756
4
4
  airflow_unicore_integration/executors/unicore_executor.py,sha256=E1nOskWSBmC-ReLRvA8E3bY-G0lpxP403tazlBNhgFQ,5919
5
5
  airflow_unicore_integration/hooks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
6
- airflow_unicore_integration/hooks/unicore_hooks.py,sha256=JjcjogWtN1xveagpkraQuYOdXjkp2lSnEdQc0waqhU4,1662
6
+ airflow_unicore_integration/hooks/unicore_hooks.py,sha256=NEJ6sgQCD9AyZO9TA3vOR8RdijqaIX_FqY-Abz74Yys,3148
7
7
  airflow_unicore_integration/operators/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
8
- airflow_unicore_integration/operators/unicore_operators.py,sha256=xYX1t_QzjpqHZDoEu6jJNNXVmBEnjIlApFWvgYoYoB0,16790
8
+ airflow_unicore_integration/operators/container.py,sha256=D-2La59XZazXvBDVn-_wFIZ0IrCYeGy18miK8S5Usmc,693
9
+ airflow_unicore_integration/operators/unicore_operators.py,sha256=PrcB-riakRUt8DHR9Hkk0ccOe-xk6NjJmNAmDpNgpN4,16868
9
10
  airflow_unicore_integration/policies/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
10
11
  airflow_unicore_integration/util/job.py,sha256=vYyaNzGw2EQWQlDTGO6q4T1gZTHoB9-RIb5KYYd6pAc,10556
11
12
  airflow_unicore_integration/util/launch_script_content.py,sha256=42_aFpaCMmvFmmUxQDGcudkleX1YSK_yYWE8T41NOy0,2915
12
- airflow_unicore_integration-0.1.12.dist-info/licenses/LICENSE,sha256=hZ5ouAedeNr8ClHrQE-RLsgMsARcmv3kSZz7tE2BTJE,1526
13
- airflow_unicore_integration-0.1.12.dist-info/METADATA,sha256=aK_J520LHA3xOf4vqqpo71zcJ6WDrVTiMf120Ph00R8,11189
14
- airflow_unicore_integration-0.1.12.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
15
- airflow_unicore_integration-0.1.12.dist-info/entry_points.txt,sha256=PzEfCLYLSawjiYR-HNBzw8-YGfJxs1nPBULevgBQjoY,147
16
- airflow_unicore_integration-0.1.12.dist-info/top_level.txt,sha256=j45X-uIuOk3oL78iwlpHakMWtUkg__B7zUlJLwmZx6w,28
17
- airflow_unicore_integration-0.1.12.dist-info/RECORD,,
13
+ airflow_unicore_integration-0.2.0.dist-info/licenses/LICENSE,sha256=hZ5ouAedeNr8ClHrQE-RLsgMsARcmv3kSZz7tE2BTJE,1526
14
+ airflow_unicore_integration-0.2.0.dist-info/METADATA,sha256=-ifD7f7RX30qQs3OMzsXKeAvo_acGHLigthKu93-rU0,13696
15
+ airflow_unicore_integration-0.2.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
16
+ airflow_unicore_integration-0.2.0.dist-info/entry_points.txt,sha256=PzEfCLYLSawjiYR-HNBzw8-YGfJxs1nPBULevgBQjoY,147
17
+ airflow_unicore_integration-0.2.0.dist-info/top_level.txt,sha256=j45X-uIuOk3oL78iwlpHakMWtUkg__B7zUlJLwmZx6w,28
18
+ airflow_unicore_integration-0.2.0.dist-info/RECORD,,