airflow-unicore-integration 0.1.9.tar.gz → 0.1.11.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (22)
  1. {airflow_unicore_integration-0.1.9/src/airflow_unicore_integration.egg-info → airflow_unicore_integration-0.1.11}/PKG-INFO +1 -1
  2. {airflow_unicore_integration-0.1.9 → airflow_unicore_integration-0.1.11}/pyproject.toml +1 -1
  3. {airflow_unicore_integration-0.1.9 → airflow_unicore_integration-0.1.11}/src/airflow_unicore_integration/util/job.py +25 -10
  4. {airflow_unicore_integration-0.1.9 → airflow_unicore_integration-0.1.11/src/airflow_unicore_integration.egg-info}/PKG-INFO +1 -1
  5. {airflow_unicore_integration-0.1.9 → airflow_unicore_integration-0.1.11}/LICENSE +0 -0
  6. {airflow_unicore_integration-0.1.9 → airflow_unicore_integration-0.1.11}/README.rst +0 -0
  7. {airflow_unicore_integration-0.1.9 → airflow_unicore_integration-0.1.11}/setup.cfg +0 -0
  8. {airflow_unicore_integration-0.1.9 → airflow_unicore_integration-0.1.11}/src/airflow_unicore_integration/__init__.py +0 -0
  9. {airflow_unicore_integration-0.1.9 → airflow_unicore_integration-0.1.11}/src/airflow_unicore_integration/executors/__init__.py +0 -0
  10. {airflow_unicore_integration-0.1.9 → airflow_unicore_integration-0.1.11}/src/airflow_unicore_integration/executors/run_task_via_supervisor.py +0 -0
  11. {airflow_unicore_integration-0.1.9 → airflow_unicore_integration-0.1.11}/src/airflow_unicore_integration/executors/unicore_executor.py +0 -0
  12. {airflow_unicore_integration-0.1.9 → airflow_unicore_integration-0.1.11}/src/airflow_unicore_integration/hooks/__init__.py +0 -0
  13. {airflow_unicore_integration-0.1.9 → airflow_unicore_integration-0.1.11}/src/airflow_unicore_integration/hooks/unicore_hooks.py +0 -0
  14. {airflow_unicore_integration-0.1.9 → airflow_unicore_integration-0.1.11}/src/airflow_unicore_integration/operators/__init__.py +0 -0
  15. {airflow_unicore_integration-0.1.9 → airflow_unicore_integration-0.1.11}/src/airflow_unicore_integration/operators/unicore_operators.py +0 -0
  16. {airflow_unicore_integration-0.1.9 → airflow_unicore_integration-0.1.11}/src/airflow_unicore_integration/policies/__init__.py +0 -0
  17. {airflow_unicore_integration-0.1.9 → airflow_unicore_integration-0.1.11}/src/airflow_unicore_integration/util/launch_script_content.py +0 -0
  18. {airflow_unicore_integration-0.1.9 → airflow_unicore_integration-0.1.11}/src/airflow_unicore_integration.egg-info/SOURCES.txt +0 -0
  19. {airflow_unicore_integration-0.1.9 → airflow_unicore_integration-0.1.11}/src/airflow_unicore_integration.egg-info/dependency_links.txt +0 -0
  20. {airflow_unicore_integration-0.1.9 → airflow_unicore_integration-0.1.11}/src/airflow_unicore_integration.egg-info/entry_points.txt +0 -0
  21. {airflow_unicore_integration-0.1.9 → airflow_unicore_integration-0.1.11}/src/airflow_unicore_integration.egg-info/requires.txt +0 -0
  22. {airflow_unicore_integration-0.1.9 → airflow_unicore_integration-0.1.11}/src/airflow_unicore_integration.egg-info/top_level.txt +0 -0
--- airflow_unicore_integration-0.1.9/src/airflow_unicore_integration.egg-info/PKG-INFO
+++ airflow_unicore_integration-0.1.11/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: airflow-unicore-integration
-Version: 0.1.9
+Version: 0.1.11
 Summary: Running Unicore Jobs from airflow DAGs.
 Author-email: Christian Böttcher <c.boettcher@fz-juelich.de>
 License-Expression: BSD-3-Clause
--- airflow_unicore_integration-0.1.9/pyproject.toml
+++ airflow_unicore_integration-0.1.11/pyproject.toml
@@ -7,7 +7,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "airflow-unicore-integration"
-version = "0.1.9"
+version = "0.1.11"
 authors = [
     { name="Christian Böttcher", email="c.boettcher@fz-juelich.de" },
 ]
--- airflow_unicore_integration-0.1.9/src/airflow_unicore_integration/util/job.py
+++ airflow_unicore_integration-0.1.11/src/airflow_unicore_integration/util/job.py
@@ -34,10 +34,13 @@ class JobDescriptionGenerator:
     def create_job_description(self, workload: ExecuteTask) -> Dict[str, Any]:
         raise NotImplementedError()
 
+    def get_job_name(self, key: TaskInstanceKey) -> str:
+        return f"{key.dag_id} - {key.task_id} - {key.run_id} - {key.try_number}"
+
 
 class NaiveJobDescriptionGenerator(JobDescriptionGenerator):
     """
-    This class generates a naive unicore job, that expects there to be a working python env containign airflow and any other required dependencies on the executing system.
+    This class generates a naive unicore job, that expects there to be a working python env containing airflow and any other required dependencies on the executing system.
     """
 
     GIT_DAG_BUNDLE_CLASSPATH = "airflow.providers.git.bundles.git.GitDagBundle"
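The new get_job_name helper centralizes the naming scheme that was previously inlined where the job description is filled (see the next hunk). A minimal sketch of what it produces, assuming Airflow's TaskInstanceKey named tuple (the import path varies across Airflow versions) and purely hypothetical key values:

from airflow.models.taskinstancekey import TaskInstanceKey

# Hypothetical values, for illustration only.
key = TaskInstanceKey(
    dag_id="example_dag", task_id="transform", run_id="manual__2024-01-01", try_number=1
)
# get_job_name(key) would return the same string this prints:
print(f"{key.dag_id} - {key.task_id} - {key.run_id} - {key.try_number}")
# -> example_dag - transform - manual__2024-01-01 - 1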
@@ -84,7 +87,7 @@ class NaiveJobDescriptionGenerator(JobDescriptionGenerator):
             "Data": LAUNCH_SCRIPT_CONTENT_STR,
         }
         # start filling the actual job description
-        job_descr_dict["Name"] = f"{key.dag_id} - {key.task_id} - {key.run_id} - {key.try_number}"
+        job_descr_dict["Name"] = self.get_job_name(key)
         job_descr_dict["Executable"] = (
             f". airflow_config.env && . {python_env} && python run_task_via_supervisor.py --json-string '{workload.model_dump_json()}'"  # TODO may require module load to be setup for some systems
         )
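For orientation, a rough sketch of the two fields set above, with placeholder paths and payload rather than real values:

# Placeholder values; shapes only.
job_descr_dict = {
    "Name": "example_dag - transform - manual__2024-01-01 - 1",
    "Executable": ". airflow_config.env && . /path/to/venv/bin/activate"
    " && python run_task_via_supervisor.py --json-string '{...}'",
}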
@@ -102,7 +105,6 @@ class NaiveJobDescriptionGenerator(JobDescriptionGenerator):
             # "AIRFLOW__CORE__DAGS_FOLDER": "./",
             "AIRFLOW__LOGGING__LOGGING_LEVEL": "DEBUG",
             "AIRFLOW__CORE__EXECUTOR": "LocalExecutor,airflow_unicore_integration.executors.unicore_executor.UnicoreExecutor",
-            "AIRFLOW__DAG_PROCESSOR__DAG_BUNDLE_STORAGE_PATH": f"{tmp_dir}/{workload.ti.id}/dagbundle",
         }
 
         # build filecontent string for importing in the job | this is needed to avoid confusing nested quotes and trying to escape them properly when using unicore env vars directly
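With this removal, the DAG-bundle storage path is no longer part of the static environment; it is set later, and only on the git-bundle branch (see the following hunks). A sketch of the static block after the change:

# Sketch of the static Environment block after this change; the
# DAG-bundle storage path is now added only when a git bundle is used.
job_descr_dict: dict = {}
job_descr_dict["Environment"] = {
    "AIRFLOW__LOGGING__LOGGING_LEVEL": "DEBUG",
    "AIRFLOW__CORE__EXECUTOR": (
        "LocalExecutor,airflow_unicore_integration.executors.unicore_executor.UnicoreExecutor"
    ),
}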
@@ -133,9 +135,14 @@ class NaiveJobDescriptionGenerator(JobDescriptionGenerator):
         ):
             git_hook = GitHook(conn_id_to_transmit)
             git_remote_url = git_hook.repo_url
-            git_local_url = f"{tmp_dir}/{workload.ti.id}/dagmirror"
-            # add precommand to clone repo on ligon node
-            git_precommand = f". {python_env} && mkdir -p {tmp_dir}/{workload.ti.id}/dagmirror && mkdir -p {tmp_dir}/{workload.ti.id}/dagbundle && git clone {git_remote_url} {git_local_url}"
+            git_dir_prefix = f"{tmp_dir}/{workload.ti.dag_id}/{workload.ti.task_id}/{workload.ti.run_id}/{workload.ti.try_number}"
+            git_local_url = f"{git_dir_prefix}/dagmirror"
+            dag_bundle_path = f"{git_dir_prefix}/dagbundle"
+            # add precommand to clone repo on login node
+            git_precommand = f". {python_env} && mkdir -p {git_local_url} && mkdir -p {dag_bundle_path} && git clone {git_remote_url} {git_local_url}"
+            job_descr_dict["Environment"][
+                "AIRFLOW__DAG_PROCESSOR__DAG_BUNDLE_STORAGE_PATH"
+            ] = f"{dag_bundle_path}"
             logger.info(f"git precommand is {git_precommand}")
             user_added_pre_commands.append(git_precommand)
             # add connection to local clone to env of job
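The flat {tmp_dir}/{workload.ti.id} working directory gives way to a per-attempt prefix built from the task instance's coordinates. A runnable sketch with hypothetical values, showing the layout the precommand creates:

# Hypothetical values, for illustration only.
tmp_dir = "/tmp"
dag_id, task_id, run_id, try_number = "example_dag", "transform", "manual__2024-01-01", 1

git_dir_prefix = f"{tmp_dir}/{dag_id}/{task_id}/{run_id}/{try_number}"
print(f"{git_dir_prefix}/dagmirror")  # local clone of the DAG repo
print(f"{git_dir_prefix}/dagbundle")  # becomes AIRFLOW__DAG_PROCESSOR__DAG_BUNDLE_STORAGE_PATH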
@@ -147,18 +154,26 @@ class NaiveJobDescriptionGenerator(JobDescriptionGenerator):
             )
             logger.info(f"connection is '{airflow_conn_string}'")
             # add cleanup of local git repo to job description
-            git_cleanup_command = f"rm -r {tmp_dir}/{workload.ti.id}"
+            git_cleanup_command = f"rm -r {git_dir_prefix}"
             logger.info(f"git cleanup is {git_cleanup_command}")
             user_added_post_commands.append(git_cleanup_command)
 
         airflow_env_import = {"To": "airflow_config.env", "Data": env_file_content}
 
-        job_descr_dict["User postcommand"] = ";".join(user_added_post_commands)
-        job_descr_dict["User precommand"] = ";".join(user_added_pre_commands)
+        job_descr_dict["Imports"] = [worker_script_import, airflow_env_import]
+
+        if len(user_added_pre_commands) > 0:
+            precommand_import = {"To": "precommand.sh", "Data": user_added_pre_commands}
+            job_descr_dict["Imports"].append(precommand_import)
+            job_descr_dict["User precommand"] = "bash precommand.sh"
+        if len(user_added_post_commands) > 0:
+            postcommand_import = {"To": "postcommand.sh", "Data": user_added_post_commands}
+            job_descr_dict["Imports"].append(postcommand_import)
+            job_descr_dict["User postcommand"] = "bash postcommand.sh"
+
         job_descr_dict["RunUserPrecommandOnLoginNode"] = (
             "true"  # precommand needs public internet access to clone dag repos
         )
-        job_descr_dict["Imports"] = [worker_script_import, airflow_env_import]
         # add user defined options to description
         if user_added_env:
             job_descr_dict["Environment"].update(user_added_env)
--- airflow_unicore_integration-0.1.9/PKG-INFO
+++ airflow_unicore_integration-0.1.11/src/airflow_unicore_integration.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: airflow-unicore-integration
-Version: 0.1.9
+Version: 0.1.11
 Summary: Running Unicore Jobs from airflow DAGs.
 Author-email: Christian Böttcher <c.boettcher@fz-juelich.de>
 License-Expression: BSD-3-Clause