airflow-unicore-integration 0.1.1__tar.gz → 0.1.3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (21)
  1. {airflow_unicore_integration-0.1.1/src/airflow_unicore_integration.egg-info → airflow_unicore_integration-0.1.3}/PKG-INFO +1 -1
  2. {airflow_unicore_integration-0.1.1 → airflow_unicore_integration-0.1.3}/pyproject.toml +1 -1
  3. {airflow_unicore_integration-0.1.1 → airflow_unicore_integration-0.1.3}/src/airflow_unicore_integration/util/job.py +13 -3
  4. {airflow_unicore_integration-0.1.1 → airflow_unicore_integration-0.1.3/src/airflow_unicore_integration.egg-info}/PKG-INFO +1 -1
  5. {airflow_unicore_integration-0.1.1 → airflow_unicore_integration-0.1.3}/LICENSE +0 -0
  6. {airflow_unicore_integration-0.1.1 → airflow_unicore_integration-0.1.3}/README.rst +0 -0
  7. {airflow_unicore_integration-0.1.1 → airflow_unicore_integration-0.1.3}/setup.cfg +0 -0
  8. {airflow_unicore_integration-0.1.1 → airflow_unicore_integration-0.1.3}/src/airflow_unicore_integration/__init__.py +0 -0
  9. {airflow_unicore_integration-0.1.1 → airflow_unicore_integration-0.1.3}/src/airflow_unicore_integration/executors/__init__.py +0 -0
  10. {airflow_unicore_integration-0.1.1 → airflow_unicore_integration-0.1.3}/src/airflow_unicore_integration/executors/run_task_via_supervisor.py +0 -0
  11. {airflow_unicore_integration-0.1.1 → airflow_unicore_integration-0.1.3}/src/airflow_unicore_integration/executors/unicore_executor.py +0 -0
  12. {airflow_unicore_integration-0.1.1 → airflow_unicore_integration-0.1.3}/src/airflow_unicore_integration/hooks/__init__.py +0 -0
  13. {airflow_unicore_integration-0.1.1 → airflow_unicore_integration-0.1.3}/src/airflow_unicore_integration/hooks/unicore_hooks.py +0 -0
  14. {airflow_unicore_integration-0.1.1 → airflow_unicore_integration-0.1.3}/src/airflow_unicore_integration/operators/__init__.py +0 -0
  15. {airflow_unicore_integration-0.1.1 → airflow_unicore_integration-0.1.3}/src/airflow_unicore_integration/operators/unicore_operators.py +0 -0
  16. {airflow_unicore_integration-0.1.1 → airflow_unicore_integration-0.1.3}/src/airflow_unicore_integration/policies/__init__.py +0 -0
  17. {airflow_unicore_integration-0.1.1 → airflow_unicore_integration-0.1.3}/src/airflow_unicore_integration.egg-info/SOURCES.txt +0 -0
  18. {airflow_unicore_integration-0.1.1 → airflow_unicore_integration-0.1.3}/src/airflow_unicore_integration.egg-info/dependency_links.txt +0 -0
  19. {airflow_unicore_integration-0.1.1 → airflow_unicore_integration-0.1.3}/src/airflow_unicore_integration.egg-info/entry_points.txt +0 -0
  20. {airflow_unicore_integration-0.1.1 → airflow_unicore_integration-0.1.3}/src/airflow_unicore_integration.egg-info/requires.txt +0 -0
  21. {airflow_unicore_integration-0.1.1 → airflow_unicore_integration-0.1.3}/src/airflow_unicore_integration.egg-info/top_level.txt +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: airflow-unicore-integration
- Version: 0.1.1
+ Version: 0.1.3
  Summary: Running Unicore Jobs from airflow DAGs.
  Author-email: Christian Böttcher <c.boettcher@fz-juelich.de>
  License-Expression: BSD-3-Clause
pyproject.toml
@@ -7,7 +7,7 @@ build-backend = "setuptools.build_meta"

  [project]
  name = "airflow-unicore-integration"
- version = "0.1.1"
+ version = "0.1.3"
  authors = [
    { name="Christian Böttcher", email="c.boettcher@fz-juelich.de" },
  ]
src/airflow_unicore_integration/util/job.py
@@ -1,3 +1,4 @@
+ import os
  from typing import Any
  from typing import Dict

@@ -52,7 +53,9 @@ class NaiveJobDescriptionGenerator(JobDescriptionGenerator):
  # local_dag_path = conf.get("core", "DAGS_FOLDER") + "/" + dag_rel_path
  base_url = conf.get("api", "base_url", fallback="/")
  default_execution_api_server = f"{base_url.rstrip('/')}/execution/"
- server = conf.get("core", "execution_api_server_url", fallback=default_execution_api_server)
+ server = conf.get(
+     "unicore.executor", "execution_api_server_url", fallback=default_execution_api_server
+ )

  # check which python virtualenv to use
  if user_defined_python_env:
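Note on this change: the execution API server URL is now read from the executor's own [unicore.executor] config section instead of [core], with the same fallback derived from [api] base_url. A minimal sketch of how the option is resolved, assuming the standard airflow.configuration API (the URL in the comment is a hypothetical example):

    from airflow.configuration import conf

    # Default mirrors job.py: "<api base_url>/execution/".
    base_url = conf.get("api", "base_url", fallback="/")
    default_execution_api_server = f"{base_url.rstrip('/')}/execution/"

    # As of 0.1.3 the override is read from [unicore.executor]; a deployment could set
    #   [unicore.executor]
    #   execution_api_server_url = https://airflow.example.org/execution/   (hypothetical)
    # in airflow.cfg to point remote workers at a reachable API server.
    server = conf.get(
        "unicore.executor", "execution_api_server_url", fallback=default_execution_api_server
    )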
@@ -65,7 +68,7 @@ class NaiveJobDescriptionGenerator(JobDescriptionGenerator):
  # dag_import = {"To": dag_rel_path, "Data": dag_content}
  worker_script_import = {
      "To": "run_task_via_supervisor.py",
-     "From": "https://gist.githubusercontent.com/cboettcher/3f1101a1d1b67e7944d17c02ecd69930/raw/6da9ec16ba598ddda9cf288900498fab5e226788/run_task_via_supervisor.py",
+     "From": "https://gist.githubusercontent.com/cboettcher/3f1101a1d1b67e7944d17c02ecd69930/raw/1d90bf38199d8c0adf47a79c8840c3e3ddf57462/run_task_via_supervisor.py",
  }
  # start filling the actual job description
  job_descr_dict["Name"] = f"{key.dag_id} - {key.task_id} - {key.run_id} - {key.try_number}"
@@ -81,11 +84,18 @@ class NaiveJobDescriptionGenerator(JobDescriptionGenerator):
  "AIRFLOW__CORE__DAGS_FOLDER": "./",
  "AIRFLOW__LOGGING__LOGGING_LEVEL": "DEBUG",
  "AIRFLOW__CORE__EXECUTOR": "LocalExecutor,airflow_unicore_integration.executors.unicore_executor.UnicoreExecutor",
+ "AIRFLOW__DAG_PROCESSOR__DAG_BUNDLE_CONFIG_LIST": os.environ.get(
+     "AIRFLOW__DAG_PROCESSOR__DAG_BUNDLE_CONFIG_LIST", ""
+ ),
  }
+ # insert connection details that are provided via env vars to get bundles
+ for env_key in os.environ.keys():
+     if env_key.startswith("AIRFLOW_CONN_"):
+         job_descr_dict["Environment"][env_key] = os.environ[env_key]
  user_added_pre_commands.append(f"source {python_env}/bin/activate")
  job_descr_dict["User precommand"] = ";".join(user_added_pre_commands)
  job_descr_dict["RunUserPrecommandOnLoginNode"] = (
-     "false" # precommand is activating the python env, this can also be done on compute node right before running the job
+     "false" # precommand includes activating the python env, this should be done on compute node right before running the job
  )
  job_descr_dict["Imports"] = [worker_script_import]
  # add user defined options to description
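The new environment handling forwards settings from the scheduler's process environment into the UNICORE job: the DAG bundle configuration list plus every connection defined via the AIRFLOW_CONN_<CONN_ID> convention (a URI or JSON value), so the remote worker can resolve the same DAG bundles and connections. A minimal standalone sketch of that forwarding (the connection name in the comment is hypothetical):

    import os

    # Bundle config list, as used by job.py above; empty string if unset on the scheduler host.
    environment = {
        "AIRFLOW__DAG_PROCESSOR__DAG_BUNDLE_CONFIG_LIST": os.environ.get(
            "AIRFLOW__DAG_PROCESSOR__DAG_BUNDLE_CONFIG_LIST", ""
        ),
    }
    # Copy every AIRFLOW_CONN_* variable so connections defined on the scheduler host,
    # e.g. AIRFLOW_CONN_MY_GIT_BUNDLE="https://user:token@git.example.org/dags.git",
    # are also visible inside the UNICORE job's environment.
    environment.update(
        {key: value for key, value in os.environ.items() if key.startswith("AIRFLOW_CONN_")}
    )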
src/airflow_unicore_integration.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: airflow-unicore-integration
- Version: 0.1.1
+ Version: 0.1.3
  Summary: Running Unicore Jobs from airflow DAGs.
  Author-email: Christian Böttcher <c.boettcher@fz-juelich.de>
  License-Expression: BSD-3-Clause