airflow-unicore-integration 0.1.4.tar.gz → 0.1.6.tar.gz

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. The information is provided for informational purposes only.
Files changed (21)
  1. {airflow_unicore_integration-0.1.4/src/airflow_unicore_integration.egg-info → airflow_unicore_integration-0.1.6}/PKG-INFO +1 -1
  2. {airflow_unicore_integration-0.1.4 → airflow_unicore_integration-0.1.6}/pyproject.toml +1 -1
  3. {airflow_unicore_integration-0.1.4 → airflow_unicore_integration-0.1.6}/src/airflow_unicore_integration/executors/unicore_executor.py +6 -2
  4. {airflow_unicore_integration-0.1.4 → airflow_unicore_integration-0.1.6}/src/airflow_unicore_integration/util/job.py +15 -10
  5. {airflow_unicore_integration-0.1.4 → airflow_unicore_integration-0.1.6/src/airflow_unicore_integration.egg-info}/PKG-INFO +1 -1
  6. {airflow_unicore_integration-0.1.4 → airflow_unicore_integration-0.1.6}/LICENSE +0 -0
  7. {airflow_unicore_integration-0.1.4 → airflow_unicore_integration-0.1.6}/README.rst +0 -0
  8. {airflow_unicore_integration-0.1.4 → airflow_unicore_integration-0.1.6}/setup.cfg +0 -0
  9. {airflow_unicore_integration-0.1.4 → airflow_unicore_integration-0.1.6}/src/airflow_unicore_integration/__init__.py +0 -0
  10. {airflow_unicore_integration-0.1.4 → airflow_unicore_integration-0.1.6}/src/airflow_unicore_integration/executors/__init__.py +0 -0
  11. {airflow_unicore_integration-0.1.4 → airflow_unicore_integration-0.1.6}/src/airflow_unicore_integration/executors/run_task_via_supervisor.py +0 -0
  12. {airflow_unicore_integration-0.1.4 → airflow_unicore_integration-0.1.6}/src/airflow_unicore_integration/hooks/__init__.py +0 -0
  13. {airflow_unicore_integration-0.1.4 → airflow_unicore_integration-0.1.6}/src/airflow_unicore_integration/hooks/unicore_hooks.py +0 -0
  14. {airflow_unicore_integration-0.1.4 → airflow_unicore_integration-0.1.6}/src/airflow_unicore_integration/operators/__init__.py +0 -0
  15. {airflow_unicore_integration-0.1.4 → airflow_unicore_integration-0.1.6}/src/airflow_unicore_integration/operators/unicore_operators.py +0 -0
  16. {airflow_unicore_integration-0.1.4 → airflow_unicore_integration-0.1.6}/src/airflow_unicore_integration/policies/__init__.py +0 -0
  17. {airflow_unicore_integration-0.1.4 → airflow_unicore_integration-0.1.6}/src/airflow_unicore_integration.egg-info/SOURCES.txt +0 -0
  18. {airflow_unicore_integration-0.1.4 → airflow_unicore_integration-0.1.6}/src/airflow_unicore_integration.egg-info/dependency_links.txt +0 -0
  19. {airflow_unicore_integration-0.1.4 → airflow_unicore_integration-0.1.6}/src/airflow_unicore_integration.egg-info/entry_points.txt +0 -0
  20. {airflow_unicore_integration-0.1.4 → airflow_unicore_integration-0.1.6}/src/airflow_unicore_integration.egg-info/requires.txt +0 -0
  21. {airflow_unicore_integration-0.1.4 → airflow_unicore_integration-0.1.6}/src/airflow_unicore_integration.egg-info/top_level.txt +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: airflow-unicore-integration
-Version: 0.1.4
+Version: 0.1.6
 Summary: Running Unicore Jobs from airflow DAGs.
 Author-email: Christian Böttcher <c.boettcher@fz-juelich.de>
 License-Expression: BSD-3-Clause
pyproject.toml
@@ -7,7 +7,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "airflow-unicore-integration"
-version = "0.1.4"
+version = "0.1.6"
 authors = [
   { name="Christian Böttcher", email="c.boettcher@fz-juelich.de" },
 ]
src/airflow_unicore_integration/executors/unicore_executor.py
@@ -39,6 +39,12 @@ STATE_MAPPINGS: Dict[uc_client.JobStatus, TaskInstanceState] = {
 
 class UnicoreExecutor(BaseExecutor):
 
+    EXECUTOR_CONFIG_UNICORE_CONN_KEY = (
+        "unicore_connection_id"  # alternative connection id for the Unicore connection to use
+    )
+    EXECUTOR_CONFIG_UNICORE_SITE_KEY = "unicore_site"  # alternative Unicore site to run at, only required if different than connection default
+    EXECUTOR_CONFIG_UNICORE_CREDENTIAL_KEY = "unicore_credential"  # alternative unicore credential to use for the job, only required if different than connection default
+
     def start(self):
         self.active_jobs: Dict[TaskInstanceKey, uc_client.Job] = {}
         self.uc_conn = unicore_hooks.UnicoreHook().get_conn()
@@ -67,9 +73,7 @@ class UnicoreExecutor(BaseExecutor):
         return []
 
     def _get_unicore_client(self, executor_config: dict | None = {}):
-        # TODO fix this only temporary solution
         return self.uc_conn
-        # END TODO fix this
         # include client desires from executor_config
         unicore_conn_id = executor_config.get(  # type: ignore
             UnicoreExecutor.EXECUTOR_CONFIG_UNICORE_CONN_KEY,
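The new class attributes define per-task override keys for executor_config; note that _get_unicore_client still returns the default hook connection before the override logic is reached, so in 0.1.6 the keys are declared but not yet honored. A minimal sketch of how a DAG task could pass these keys, assuming a standard BashOperator; the connection id and site name below are made-up values:

from airflow.operators.bash import BashOperator

# Hypothetical per-task overrides; the dict keys mirror the constants
# added in 0.1.6 (EXECUTOR_CONFIG_UNICORE_CONN_KEY and
# EXECUTOR_CONFIG_UNICORE_SITE_KEY), the values are invented examples.
task = BashOperator(
    task_id="run_on_alternate_site",
    bash_command="hostname",
    executor_config={
        "unicore_connection_id": "unicore-backup-conn",
        "unicore_site": "ALT_SITE",
    },
)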
src/airflow_unicore_integration/util/job.py
@@ -1,4 +1,3 @@
-import json
 import os
 from typing import Any
 from typing import Dict
@@ -64,7 +63,7 @@ class NaiveJobDescriptionGenerator(JobDescriptionGenerator):
         else:
             python_env = conf.get("unicore.executor", "DEFAULT_ENV")
         # prepare dag file to be uploaded via unicore
-        # dag_file = open(local_dag_path)
+        # dag_file = open("/tmp/test")
         # dag_content = dag_file.readlines()
         # dag_import = {"To": dag_rel_path, "Data": dag_content}
         worker_script_import = {
@@ -85,22 +84,28 @@ class NaiveJobDescriptionGenerator(JobDescriptionGenerator):
             "AIRFLOW__CORE__DAGS_FOLDER": "./",
             "AIRFLOW__LOGGING__LOGGING_LEVEL": "DEBUG",
             "AIRFLOW__CORE__EXECUTOR": "LocalExecutor,airflow_unicore_integration.executors.unicore_executor.UnicoreExecutor",
-            "AIRFLOW__DAG_PROCESSOR__DAG_BUNDLE_CONFIG_LIST": json.dumps(
-                os.environ.get("AIRFLOW__DAG_PROCESSOR__DAG_BUNDLE_CONFIG_LIST", "")
-            ).replace("\\n", ""),
         }
+
+        # build filecontent string for importing in the job | this is needed to avoid confusing nested quotes and trying to escape them properly when using unicore env vars directly
+        env_file_content: list[str] = [
+            f"export AIRFLOW__DAG_PROCESSOR__DAG_BUNDLE_CONFIG_LIST='{os.environ.get("AIRFLOW__DAG_PROCESSOR__DAG_BUNDLE_CONFIG_LIST", "")}'"
+        ]
+
         # insert connection details that are provided via env vars to get bundles
         for env_key in os.environ.keys():
             if env_key.startswith("AIRFLOW_CONN_"):
-                job_descr_dict["Environment"][env_key] = json.dumps(os.environ[env_key]).replace(
-                    "\\n", ""
-                )
-        user_added_pre_commands.append(f"source {python_env}/bin/activate")
+                env_file_content.append(f"export {env_key}='{os.environ[env_key]}'")
+
+        airflow_env_import = {"To": "airflow_config.env", "Data": env_file_content}
+
+        user_added_pre_commands.append(
+            f"source airflow_config.env && source {python_env}/bin/activate"
+        )
         job_descr_dict["User precommand"] = ";".join(user_added_pre_commands)
         job_descr_dict["RunUserPrecommandOnLoginNode"] = (
             "false"  # precommand includes activating the python env, this should be done on compute node right before running the job
         )
-        job_descr_dict["Imports"] = [worker_script_import]
+        job_descr_dict["Imports"] = [worker_script_import, airflow_env_import]
         # add user defined options to description
         if user_added_env:
             job_descr_dict["Environment"].update(user_added_env)
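This is the substantive change in 0.1.6: instead of embedding the bundle config list and AIRFLOW_CONN_* values as JSON in the job's Environment, the generator writes them as export lines into a file named airflow_config.env, stages it through the UNICORE "Imports" list, and sources it in the user precommand on the compute node. A self-contained sketch of the mechanism under the same names as the diff; the AIRFLOW_CONN_EXAMPLE_DB value and the venv path are made-up placeholders:

import os

# Invented example value, only so the loop below has something to pick up.
os.environ["AIRFLOW_CONN_EXAMPLE_DB"] = "postgresql://user:pass@host:5432/db"

# One export line per variable, single-quoted so the shell takes the value literally.
env_file_content = [
    f"export AIRFLOW__DAG_PROCESSOR__DAG_BUNDLE_CONFIG_LIST='{os.environ.get('AIRFLOW__DAG_PROCESSOR__DAG_BUNDLE_CONFIG_LIST', '')}'"
]
for env_key in os.environ:
    if env_key.startswith("AIRFLOW_CONN_"):
        env_file_content.append(f"export {env_key}='{os.environ[env_key]}'")

# UNICORE stages this list as a file in the job directory...
airflow_env_import = {"To": "airflow_config.env", "Data": env_file_content}

# ...and the precommand sources it before activating the Python env,
# so the worker process inherits the connection settings.
precommand = "source airflow_config.env && source /path/to/venv/bin/activate"
print(airflow_env_import, precommand)

Sourcing a staged file sidesteps the nested-quote escaping that the old json.dumps approach fought with, though a value that itself contains a single quote would still break the export lines as generated here.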
src/airflow_unicore_integration.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: airflow-unicore-integration
-Version: 0.1.4
+Version: 0.1.6
 Summary: Running Unicore Jobs from airflow DAGs.
 Author-email: Christian Böttcher <c.boettcher@fz-juelich.de>
 License-Expression: BSD-3-Clause