airflow-unicore-integration 0.1.10__tar.gz → 0.1.12__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {airflow_unicore_integration-0.1.10/src/airflow_unicore_integration.egg-info → airflow_unicore_integration-0.1.12}/PKG-INFO +1 -1
- {airflow_unicore_integration-0.1.10 → airflow_unicore_integration-0.1.12}/pyproject.toml +1 -1
- {airflow_unicore_integration-0.1.10 → airflow_unicore_integration-0.1.12}/src/airflow_unicore_integration/util/job.py +15 -5
- {airflow_unicore_integration-0.1.10 → airflow_unicore_integration-0.1.12/src/airflow_unicore_integration.egg-info}/PKG-INFO +1 -1
- {airflow_unicore_integration-0.1.10 → airflow_unicore_integration-0.1.12}/LICENSE +0 -0
- {airflow_unicore_integration-0.1.10 → airflow_unicore_integration-0.1.12}/README.rst +0 -0
- {airflow_unicore_integration-0.1.10 → airflow_unicore_integration-0.1.12}/setup.cfg +0 -0
- {airflow_unicore_integration-0.1.10 → airflow_unicore_integration-0.1.12}/src/airflow_unicore_integration/__init__.py +0 -0
- {airflow_unicore_integration-0.1.10 → airflow_unicore_integration-0.1.12}/src/airflow_unicore_integration/executors/__init__.py +0 -0
- {airflow_unicore_integration-0.1.10 → airflow_unicore_integration-0.1.12}/src/airflow_unicore_integration/executors/run_task_via_supervisor.py +0 -0
- {airflow_unicore_integration-0.1.10 → airflow_unicore_integration-0.1.12}/src/airflow_unicore_integration/executors/unicore_executor.py +0 -0
- {airflow_unicore_integration-0.1.10 → airflow_unicore_integration-0.1.12}/src/airflow_unicore_integration/hooks/__init__.py +0 -0
- {airflow_unicore_integration-0.1.10 → airflow_unicore_integration-0.1.12}/src/airflow_unicore_integration/hooks/unicore_hooks.py +0 -0
- {airflow_unicore_integration-0.1.10 → airflow_unicore_integration-0.1.12}/src/airflow_unicore_integration/operators/__init__.py +0 -0
- {airflow_unicore_integration-0.1.10 → airflow_unicore_integration-0.1.12}/src/airflow_unicore_integration/operators/unicore_operators.py +0 -0
- {airflow_unicore_integration-0.1.10 → airflow_unicore_integration-0.1.12}/src/airflow_unicore_integration/policies/__init__.py +0 -0
- {airflow_unicore_integration-0.1.10 → airflow_unicore_integration-0.1.12}/src/airflow_unicore_integration/util/launch_script_content.py +0 -0
- {airflow_unicore_integration-0.1.10 → airflow_unicore_integration-0.1.12}/src/airflow_unicore_integration.egg-info/SOURCES.txt +0 -0
- {airflow_unicore_integration-0.1.10 → airflow_unicore_integration-0.1.12}/src/airflow_unicore_integration.egg-info/dependency_links.txt +0 -0
- {airflow_unicore_integration-0.1.10 → airflow_unicore_integration-0.1.12}/src/airflow_unicore_integration.egg-info/entry_points.txt +0 -0
- {airflow_unicore_integration-0.1.10 → airflow_unicore_integration-0.1.12}/src/airflow_unicore_integration.egg-info/requires.txt +0 -0
- {airflow_unicore_integration-0.1.10 → airflow_unicore_integration-0.1.12}/src/airflow_unicore_integration.egg-info/top_level.txt +0 -0
src/airflow_unicore_integration/util/job.py (+15 -5):

@@ -25,6 +25,7 @@ class JobDescriptionGenerator:
     EXECUTOR_CONFIG_PROJECT = "Project"  # gets added to the unicore job description
     EXECUTOR_CONFIG_PRE_COMMANDS = "precommands"  # gets added to the unicore job description
     EXECUTOR_CONFIG_POST_COMMANDS = "postcommands"  # gets added to the unicore job descirption
+    EXECUTOR_CONFIG_JOB_TYPE = "job_type"
     EXECUTOR_CONFIG_UNICORE_CONN_KEY = (
         "unicore_connection_id"  # alternative connection id for the Unicore connection to use
     )
@@ -59,6 +60,7 @@ class NaiveJobDescriptionGenerator(JobDescriptionGenerator):
         user_added_pre_commands: list[str] = executor_config.get(JobDescriptionGenerator.EXECUTOR_CONFIG_PRE_COMMANDS, [])  # type: ignore
         user_defined_python_env: str = workload.ti.executor_config.get(JobDescriptionGenerator.EXECUTOR_CONFIG_PYTHON_ENV_KEY, None)  # type: ignore
         user_added_post_commands: list[str] = executor_config.get(JobDescriptionGenerator.EXECUTOR_CONFIG_POST_COMMANDS, [])  # type: ignore
+        user_defined_job_type: str = executor_config.get(JobDescriptionGenerator.EXECUTOR_CONFIG_JOB_TYPE, None)  # type: ignore
         # get local dag path from cmd and fix dag path in arguments
         dag_rel_path = str(workload.dag_rel_path)
         if dag_rel_path.startswith("DAG_FOLDER"):
@@ -71,6 +73,10 @@ class NaiveJobDescriptionGenerator(JobDescriptionGenerator):
         )
         logger.debug(f"Server is {server}")
 
+        # set job type
+        if user_defined_job_type:
+            job_descr_dict["Job type"] = user_defined_job_type
+
         # check which python virtualenv to use
         if user_defined_python_env:
             python_env = user_defined_python_env
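The three hunks above introduce a new `job_type` entry in a task's `executor_config`, which 0.1.12 forwards into the `"Job type"` field of the generated UNICORE job description. As a rough illustration (not taken from the package docs), a DAG task might opt in as sketched below; the key names come from the diff, while the DAG, task, and all values are made-up placeholders, and the accepted `"Job type"` strings depend on the target UNICORE server.

```python
# Hypothetical usage sketch. Key names in executor_config are taken from the
# diff above; all values are illustrative placeholders. Import paths may
# differ between Airflow versions.
import datetime

from airflow import DAG
from airflow.decorators import task

with DAG(dag_id="unicore_job_type_demo", start_date=datetime.datetime(2024, 1, 1), schedule=None):

    @task(
        executor_config={
            "Project": "my-hpc-project",          # placeholder compute project
            "precommands": ["module load git"],   # extra shell commands before the task
            "postcommands": ["echo cleanup"],     # extra shell commands after the task
            "job_type": "interactive",            # assumed value; forwarded to "Job type" in 0.1.12
        }
    )
    def run_on_hpc():
        print("hello from a UNICORE-backed worker")

    run_on_hpc()
```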
@@ -105,7 +111,6 @@ class NaiveJobDescriptionGenerator(JobDescriptionGenerator):
             # "AIRFLOW__CORE__DAGS_FOLDER": "./",
             "AIRFLOW__LOGGING__LOGGING_LEVEL": "DEBUG",
             "AIRFLOW__CORE__EXECUTOR": "LocalExecutor,airflow_unicore_integration.executors.unicore_executor.UnicoreExecutor",
-            "AIRFLOW__DAG_PROCESSOR__DAG_BUNDLE_STORAGE_PATH": f"{tmp_dir}/{workload.ti.id}/dagbundle",
         }
 
         # build filecontent string for importing in the job | this is needed to avoid confusing nested quotes and trying to escape them properly when using unicore env vars directly
@@ -136,9 +141,14 @@ class NaiveJobDescriptionGenerator(JobDescriptionGenerator):
         ):
             git_hook = GitHook(conn_id_to_transmit)
             git_remote_url = git_hook.repo_url
-
-
-
+            git_dir_prefix = f"{tmp_dir}/{workload.ti.dag_id}/{workload.ti.task_id}/{workload.ti.run_id}/{workload.ti.try_number}"
+            git_local_url = f"{git_dir_prefix}/dagmirror"
+            dag_bundle_path = f"{git_dir_prefix}/dagbundle"
+            # add precommand to clone repo on login node
+            git_precommand = f". {python_env} && mkdir -p {git_local_url} && mkdir -p {dag_bundle_path} && git clone {git_remote_url} {git_local_url}"
+            job_descr_dict["Environment"][
+                "AIRFLOW__DAG_PROCESSOR__DAG_BUNDLE_STORAGE_PATH"
+            ] = f"{dag_bundle_path}"
             logger.info(f"git precommand is {git_precommand}")
             user_added_pre_commands.append(git_precommand)
             # add connection to local clone to env of job
@@ -150,7 +160,7 @@ class NaiveJobDescriptionGenerator(JobDescriptionGenerator):
             )
             logger.info(f"connection is '{airflow_conn_string}'")
             # add cleanup of local git repo to job description
-            git_cleanup_command = f"rm -r {
+            git_cleanup_command = f"rm -r {git_dir_prefix}"
             logger.info(f"git cleanup is {git_cleanup_command}")
             user_added_post_commands.append(git_cleanup_command)
 
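The last three hunks rework how the DAG repository is mirrored for a task: the bundle storage path moves out of the static environment block into a per-try directory that a precommand clones into and a postcommand removes. Below is a standalone sketch of the strings being assembled, with placeholder values for everything the diff does not show (tmp_dir, the virtualenv path, the repository URL, and the task-instance fields); only the f-string templates are taken from the diff.

```python
# Illustration only: the approximate shell strings the 0.1.12 code builds,
# rendered with made-up placeholder values so the per-try layout is visible.
tmp_dir = "/tmp/airflow"                          # placeholder for the executor's tmp_dir
python_env = "/opt/venv/bin/activate"             # placeholder virtualenv activation script
git_remote_url = "https://example.com/dags.git"   # placeholder GitHook repo URL
dag_id, task_id, run_id, try_number = "my_dag", "my_task", "manual__2024-01-01", 1

git_dir_prefix = f"{tmp_dir}/{dag_id}/{task_id}/{run_id}/{try_number}"
git_local_url = f"{git_dir_prefix}/dagmirror"
dag_bundle_path = f"{git_dir_prefix}/dagbundle"

# precommand added in 0.1.12: clone the DAG repo into the per-try directory
git_precommand = (
    f". {python_env} && mkdir -p {git_local_url} && mkdir -p {dag_bundle_path} "
    f"&& git clone {git_remote_url} {git_local_url}"
)
# postcommand in 0.1.12: remove the whole per-try prefix after the task
git_cleanup_command = f"rm -r {git_dir_prefix}"

print(git_precommand)
print(git_cleanup_command)
```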