airflow-unicore-integration 0.1.13__py3-none-any.whl → 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
--- airflow_unicore_integration/hooks/unicore_hooks.py (0.1.13)
+++ airflow_unicore_integration/hooks/unicore_hooks.py (0.2.0)
@@ -65,10 +65,23 @@ class UnicoreHook(BaseHook):
             credential = overwrite_credential
         if not base_url:
             raise TypeError()
-        logger.info(f"Using credential {credential} for SITE {base_url}.")
         conn = client.Client(credential, base_url)
         return conn
 
+    def get_credential(self) -> credentials.Credential:
+        """Return the credential part of the connection as a Credential object."""
+        params = self.get_connection(self.uc_conn_id)
+        credential = credentials.UsernamePassword(params.login, params.password)
+        auth_token = params.extra_dejson.get("auth_token", None)
+        if auth_token is not None:
+            credential = credentials.create_credential(token=auth_token)
+        return credential
+
+    def get_base_url(self) -> str:
+        """Return the base url of the connection."""
+        params = self.get_connection(self.uc_conn_id)
+        return params.host
+
     def test_connection(self) -> tuple[bool, str]:
         """Test the connection by sending an access_info request"""
         conn = self.get_conn()
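
Version 0.2.0 adds two accessors that expose the connection parts separately, so callers such as an executor can obtain the credential and site URL without building a full client. A minimal usage sketch (an editor's illustration, not code from the package; it assumes the hook constructor accepts a uc_conn_id argument, as the attribute use above suggests, and that a matching Airflow connection exists):

# Hypothetical connection id; configure it in Airflow beforehand.
from airflow_unicore_integration.hooks.unicore_hooks import UnicoreHook

hook = UnicoreHook(uc_conn_id="unicore_default")
credential = hook.get_credential()  # UsernamePassword, or a token credential
base_url = hook.get_base_url()      # the connection's host field
conn = hook.get_conn()              # pyunicore client.Client built from both parts
ok, message = hook.test_connection()

Note that get_credential prefers an auth_token from the connection extras over the login/password pair, mirroring the logic in the diff above.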
--- airflow_unicore_integration-0.1.13.dist-info/METADATA
+++ airflow_unicore_integration-0.2.0.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: airflow-unicore-integration
-Version: 0.1.13
+Version: 0.2.0
 Summary: Running Unicore Jobs from airflow DAGs.
 Author-email: Christian Böttcher <c.boettcher@fz-juelich.de>
 License-Expression: BSD-3-Clause
@@ -24,7 +24,7 @@ Unicore Airflow Integration
 
 |Generic badge|
 
-.. |Generic badge| image:: https://github.com/UNICORE-EU/airflow-unicore-integration/actions/workflows/publish-to-pypi.yml/badge.svg
+.. |Generic badge| image:: https://github.com/UNICORE-EU/airflow-unicore-integration/actions/workflows/publish-to-pypi.yml/badge.svg
    :target: https://github.com/UNICORE-EU/airflow-unicore-integration/actions/workflows/publish-to-pypi.yml
 
 This project integrates `UNICORE <https://github.com/UNICORE-EU>`_ and `Apache Airflow <https://airflow.apache.org/>`_.
@@ -34,6 +34,31 @@ Airflow is a platform to programmatically author, schedule and monitor workflows
 In the current state, this project provides a set of airflow `operators <https://airflow.apache.org/docs/apache-airflow/stable/core-concepts/operators.html>`_, which can be used as part of airflow workflows to submit jobs to Unicore.
 The UnicoreExecutor only offers experimental support for airflow 3 so far. Further support is currently being worked on.
 
+-------------------------
+Using the UnicoreExecutor
+-------------------------
+
+To use the UnicoreExecutor, this library needs to be installed in your airflow environment, and some configuration is required.
+
+How to configure these settings is up to your deployment, as the executor uses the standard airflow configuration mechanism.
+In case of a helm deployment via the official helm chart, you will need to use environment variables, as the Unicore-related options are not present in the chart's schema and setting them there would cause schema validation to fail.
+
+All options belong to the [unicore.executor] section in airflow.cfg, or carry the ``AIRFLOW__UNICORE_EXECUTOR__`` prefix when set as environment variables.
+
+========================= ============================================ =============================================================================================
+Option name               default                                      description
+========================= ============================================ =============================================================================================
+EXECUTION_API_SERVER_URL  <The default from the airflow config>        The URL to reach the airflow API server from the execution environment (e.g. compute nodes)
+AUTH_TOKEN                mandatory                                    The Unicore auth token to use for job submission
+DEFAULT_URL               http://localhost:8080/DEMO-SITE/rest/core    The default Unicore site to submit jobs to
+DEFAULT_ENV               mandatory                                    The default activation script for a functional airflow environment on the execution machine
+TMP_DIR                   /tmp                                         A temporary directory to store data such as GitDagBundles
+========================= ============================================ =============================================================================================
+
+The default env is loaded via ``. default_env.sh`` and must provide an environment in which a suitable Python version is available and the ``apache-airflow-task-sdk`` and ``apache-airflow-providers-git`` packages are installed. Which other dependencies are needed depends on the DAGs to be run, but they must already be included in the environment.
+
+A simple solution for this may be the ``activate`` script of a Python venv. If the target system requires additional commands to enable Python (e.g. ``module load``), these may be added to the top of the activate script.
+
 ---------------------------
 Using the Unicore Operators
 ---------------------------
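
Since the executor options use Airflow's standard configuration mechanism, they can also be read back programmatically. A minimal sketch (an editor's illustration, not code from the package), using the [unicore.executor] section name and the defaults from the table above:

from airflow.configuration import conf

# Each option can equivalently be set through an environment variable with
# the documented prefix, e.g. AIRFLOW__UNICORE_EXECUTOR__DEFAULT_URL.
default_url = conf.get(
    "unicore.executor",
    "DEFAULT_URL",
    fallback="http://localhost:8080/DEMO-SITE/rest/core",
)
auth_token = conf.get("unicore.executor", "AUTH_TOKEN")  # mandatory, no default
tmp_dir = conf.get("unicore.executor", "TMP_DIR", fallback="/tmp")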
--- airflow_unicore_integration-0.1.13.dist-info/RECORD
+++ airflow_unicore_integration-0.2.0.dist-info/RECORD
@@ -3,16 +3,16 @@ airflow_unicore_integration/executors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCe
 airflow_unicore_integration/executors/run_task_via_supervisor.py,sha256=3ErgPf-Oy3B4Di5yNXhhPkaojIJykvCxMZ9MlKSYPI8,2756
 airflow_unicore_integration/executors/unicore_executor.py,sha256=E1nOskWSBmC-ReLRvA8E3bY-G0lpxP403tazlBNhgFQ,5919
 airflow_unicore_integration/hooks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-airflow_unicore_integration/hooks/unicore_hooks.py,sha256=NAR5YmnPVzSsqpNVpRHt1sS3_HkEP_e8qoCv7daUZAA,2579
+airflow_unicore_integration/hooks/unicore_hooks.py,sha256=NEJ6sgQCD9AyZO9TA3vOR8RdijqaIX_FqY-Abz74Yys,3148
 airflow_unicore_integration/operators/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 airflow_unicore_integration/operators/container.py,sha256=D-2La59XZazXvBDVn-_wFIZ0IrCYeGy18miK8S5Usmc,693
 airflow_unicore_integration/operators/unicore_operators.py,sha256=PrcB-riakRUt8DHR9Hkk0ccOe-xk6NjJmNAmDpNgpN4,16868
 airflow_unicore_integration/policies/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 airflow_unicore_integration/util/job.py,sha256=vYyaNzGw2EQWQlDTGO6q4T1gZTHoB9-RIb5KYYd6pAc,10556
 airflow_unicore_integration/util/launch_script_content.py,sha256=42_aFpaCMmvFmmUxQDGcudkleX1YSK_yYWE8T41NOy0,2915
-airflow_unicore_integration-0.1.13.dist-info/licenses/LICENSE,sha256=hZ5ouAedeNr8ClHrQE-RLsgMsARcmv3kSZz7tE2BTJE,1526
-airflow_unicore_integration-0.1.13.dist-info/METADATA,sha256=KXZXAPP9dv8EN2UheZDJ9TeXiIb5jL-t6AggYOJFDPU,11189
-airflow_unicore_integration-0.1.13.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-airflow_unicore_integration-0.1.13.dist-info/entry_points.txt,sha256=PzEfCLYLSawjiYR-HNBzw8-YGfJxs1nPBULevgBQjoY,147
-airflow_unicore_integration-0.1.13.dist-info/top_level.txt,sha256=j45X-uIuOk3oL78iwlpHakMWtUkg__B7zUlJLwmZx6w,28
-airflow_unicore_integration-0.1.13.dist-info/RECORD,,
+airflow_unicore_integration-0.2.0.dist-info/licenses/LICENSE,sha256=hZ5ouAedeNr8ClHrQE-RLsgMsARcmv3kSZz7tE2BTJE,1526
+airflow_unicore_integration-0.2.0.dist-info/METADATA,sha256=-ifD7f7RX30qQs3OMzsXKeAvo_acGHLigthKu93-rU0,13696
+airflow_unicore_integration-0.2.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+airflow_unicore_integration-0.2.0.dist-info/entry_points.txt,sha256=PzEfCLYLSawjiYR-HNBzw8-YGfJxs1nPBULevgBQjoY,147
+airflow_unicore_integration-0.2.0.dist-info/top_level.txt,sha256=j45X-uIuOk3oL78iwlpHakMWtUkg__B7zUlJLwmZx6w,28
+airflow_unicore_integration-0.2.0.dist-info/RECORD,,