airflow-unicore-integration 0.0.5__py3-none-any.whl → 0.1.0__py3-none-any.whl

airflow_unicore_integration/__init__.py
@@ -3,8 +3,10 @@ def get_provider_info():
         "package-name": "airflow-unicore-integration",
         "name": "Unicore",
         "description": "Apache Airflow Unicore provider containing Operators and hooks.",
-        "connection-types": [{
-            "connection-type": "unicore",
-            "hook-class-name": "airflow_unicore_integration.hooks.unicore_hooks.UnicoreHook",
-        }],
+        "connection-types": [
+            {
+                "connection-type": "unicore",
+                "hook-class-name": "airflow_unicore_integration.hooks.unicore_hooks.UnicoreHook",
+            }
+        ],
     }
airflow_unicore_integration/executors/run_task_via_supervisor.py
@@ -0,0 +1,85 @@
+"""
+Usage:
+
+python run_task_via_supervisor.py [--json-string <workload string> | --json-path <workload filepath>]
+
+"""
+
+import argparse
+import sys
+
+import structlog
+from airflow.configuration import conf
+from airflow.executors import workloads
+from airflow.sdk.execution_time.supervisor import supervise
+from pydantic import TypeAdapter
+from pydantic_core._pydantic_core import ValidationError
+
+log = structlog.get_logger(logger_name=__name__)
+
+
+def execute_workload_locally(workload: workloads.All):
+    if not isinstance(workload, workloads.ExecuteTask):
+        raise ValueError(f"Executor does not know how to handle {type(workload)}")
+
+    base_url = conf.get("api", "base_url", fallback="/")
+    default_execution_api_server = f"{base_url.rstrip('/')}/execution/"
+    server = conf.get("core", "execution_api_server_url", fallback=default_execution_api_server)
+    log.info("Connecting to server:", server=server)
+
+    supervise(
+        # This is the "wrong" ti type, but it duck types the same. TODO: Create a protocol for this.
+        ti=workload.ti,  # type: ignore[arg-type]
+        dag_rel_path=workload.dag_rel_path,
+        bundle_info=workload.bundle_info,
+        token=workload.token,
+        server=server,
+        log_path=workload.log_path,
+        # Include the output of the task on stdout too, so that in-process logs can be
+        # read via the kubeapi as pod logs.
+        subprocess_logs_to_stdout=True,
+    )
+
+
+def main():
+    parser = argparse.ArgumentParser(
+        description="Execute a workload in a containerised executor using the task SDK."
+    )
+
+    # Create a mutually exclusive group to ensure that only one of the flags is set
+    group = parser.add_mutually_exclusive_group(required=True)
+    group.add_argument(
+        "--json-path",
+        help="Path to the input JSON file containing the execution workload payload.",
+        type=str,
+    )
+    group.add_argument(
+        "--json-string",
+        help="The JSON string itself containing the execution workload payload.",
+        type=str,
+    )
+    args = parser.parse_args()
+
+    decoder = TypeAdapter[workloads.All](workloads.All)
+
+    if args.json_path:
+        try:
+            with open(args.json_path) as file:
+                input_data = file.read()
+                workload = decoder.validate_json(input_data)
+        except OSError as e:
+            log.error("Failed to read file", error=str(e))
+            sys.exit(1)
+
+    elif args.json_string:
+        try:
+            workload = decoder.validate_json(args.json_string)
+        except ValidationError as e:
+            log.error("Failed to parse input JSON string", error=str(e))
+            sys.exit(1)
+
+    execute_workload_locally(workload)
+
+
+if __name__ == "__main__":
+    main()
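For orientation: the executor embeds the workload as `--json-string '<payload>'` via `model_dump_json()` (see util/job.py further down), and this script decodes it with the `TypeAdapter` call shown above. A minimal round-trip sketch using only those calls; the function name is illustrative:

```python
# Illustrative only: serialize an ExecuteTask the way the executor embeds it
# into the job description, then decode it the way run_task_via_supervisor.py does.
from airflow.executors import workloads
from pydantic import TypeAdapter

def roundtrip(workload: workloads.ExecuteTask) -> workloads.All:
    payload = workload.model_dump_json()  # executor side (util/job.py below)
    decoder = TypeAdapter[workloads.All](workloads.All)  # worker side (this script)
    return decoder.validate_json(payload)
```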

airflow_unicore_integration/executors/unicore_executor.py
@@ -0,0 +1,123 @@
+"""
+To configure the executor:
+- Connection details for Unicore: conn_id via AIRFLOW__UNICORE_EXECUTOR__UNICORE_CONN_ID | should be defined, can be skipped if every task provides one
+- Location (path) of a python virtualenv prepared on the HPC system | AIRFLOW__UNICORE_EXECUTOR__DEFAULT_ENV | should be defined, can be skipped if every task provides one
+
+Tasks should be allowed to overwrite SITE, CREDENTIALS_*, UNICORE_CONN_ID and DEFAULT_ENV - i.e. everything but the database connection. Credentials should be given as a uc_credential object via executor_config.
+"""
+
+import time
+from typing import Any
+from typing import Dict
+from typing import List
+
+import pyunicore.client as uc_client
+from airflow.configuration import conf
+from airflow.executors.base_executor import BaseExecutor
+from airflow.executors.workloads import All
+from airflow.executors.workloads import ExecuteTask
+from airflow.models.taskinstancekey import TaskInstanceKey
+from airflow.utils.state import TaskInstanceState
+
+from airflow_unicore_integration.hooks import unicore_hooks
+
+from ..util.job import JobDescriptionGenerator
+from ..util.job import NaiveJobDescriptionGenerator
+
+STATE_MAPPINGS: Dict[uc_client.JobStatus, TaskInstanceState] = {
+    uc_client.JobStatus.UNDEFINED: TaskInstanceState.FAILED,
+    uc_client.JobStatus.READY: TaskInstanceState.QUEUED,
+    uc_client.JobStatus.STAGINGIN: TaskInstanceState.QUEUED,
+    uc_client.JobStatus.QUEUED: TaskInstanceState.QUEUED,
+    uc_client.JobStatus.RUNNING: TaskInstanceState.RUNNING,
+    uc_client.JobStatus.STAGINGOUT: TaskInstanceState.RUNNING,
+    uc_client.JobStatus.SUCCESSFUL: TaskInstanceState.SUCCESS,
+    uc_client.JobStatus.FAILED: TaskInstanceState.FAILED,
+}
+
+
+class UnicoreExecutor(BaseExecutor):
+
+    def start(self):
+        self.active_jobs: Dict[TaskInstanceKey, uc_client.Job] = {}
+        self.uc_conn = unicore_hooks.UnicoreHook().get_conn()
+        # TODO get job description generator class and init params from config
+        self.job_descr_generator: JobDescriptionGenerator = NaiveJobDescriptionGenerator()
+
+    def sync(self) -> None:
+        # iterate through the task collection and update task/job status - delete if needed
+        for task, job in list(self.active_jobs.items()):
+            state = STATE_MAPPINGS[job.status]
+            if state == TaskInstanceState.FAILED:
+                self.fail(task)
+                self._forward_unicore_log(task, job)
+                self.active_jobs.pop(task)
+            elif state == TaskInstanceState.SUCCESS:
+                self.success(task)
+                self._forward_unicore_log(task, job)
+                self.active_jobs.pop(task)
+            elif state == TaskInstanceState.RUNNING:
+                self.running_state(task, state)
+
+        return super().sync()
+
+    def _forward_unicore_log(self, task: TaskInstanceKey, job: uc_client.Job) -> List[str]:
+        # TODO retrieve unicore logs from the job directory and return them
+        return []
+
+    def _get_unicore_client(self, executor_config: dict | None = {}):
+        # TODO fix this, only a temporary solution
+        return self.uc_conn
+        # END TODO fix this
+        # include client desires from executor_config
+        unicore_conn_id = executor_config.get(  # type: ignore
+            UnicoreExecutor.EXECUTOR_CONFIG_UNICORE_CONN_KEY,
+            conf.get("unicore.executor", "UNICORE_CONN_ID"),
+        )  # task can provide a different unicore connection to use, else the airflow-wide default is used
+        self.log.info(f"Using base unicore connection with id '{unicore_conn_id}'")
+        hook = unicore_hooks.UnicoreHook(uc_conn_id=unicore_conn_id)
+        unicore_site = executor_config.get(  # type: ignore
+            UnicoreExecutor.EXECUTOR_CONFIG_UNICORE_SITE_KEY, None
+        )  # task can provide a different site to run at, else the default from the connection is used
+        unicore_credential = executor_config.get(  # type: ignore
+            UnicoreExecutor.EXECUTOR_CONFIG_UNICORE_CREDENTIAL_KEY, None
+        )  # task can provide a different credential to use, else the default from the connection is used
+        return hook.get_conn(
+            overwrite_base_url=unicore_site, overwrite_credential=unicore_credential
+        )
+
+    def _submit_job(self, workload: ExecuteTask):
+        uc_client = self._get_unicore_client(executor_config=workload.ti.executor_config)
+        job_descr = self._create_job_description(workload)
+        self.log.info("Generated job description")
+        self.log.debug(str(job_descr))
+        job = uc_client.new_job(job_descr)
+        self.log.info("Submitted unicore job")
+        self.active_jobs[workload.ti.key] = job
+        return job
+
+    def _create_job_description(self, workload: ExecuteTask) -> Dict[str, Any]:
+        return self.job_descr_generator.create_job_description(workload)
+
+    def queue_workload(self, workload: ExecuteTask | All, session):
+        if not isinstance(workload, ExecuteTask):
+            raise TypeError(f"Don't know how to queue workload of type {type(workload).__name__}")
+
+        # submit the job to unicore and add it to the active_jobs dict for task state management
+        job = self._submit_job(workload)
+        self.active_jobs[workload.ti.key] = job
+
+    def end(self, heartbeat_interval=10) -> None:
+        # wait for current jobs to finish, don't start any new ones
+        while True:
+            self.sync()
+            if not self.active_jobs:
+                break
+            time.sleep(heartbeat_interval)
+
+    def terminate(self):
+        # terminate all jobs
+        for task, job in list(self.active_jobs.items()):
+            job.abort()
+        self.end()
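To schedule tasks through this executor, Airflow has to be pointed at the class path above. A minimal sketch, assuming the usual `[core] executor` convention; note that the generated job description (util/job.py below) uses a comma-separated hybrid form of the same class path on the worker side:

```python
# Hypothetical: select the UnicoreExecutor before starting the scheduler;
# equivalent to setting [core] executor in airflow.cfg.
import os

os.environ["AIRFLOW__CORE__EXECUTOR"] = (
    "airflow_unicore_integration.executors.unicore_executor.UnicoreExecutor"
)
```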

airflow_unicore_integration/hooks/unicore_hooks.py
@@ -1,8 +1,8 @@
 from __future__ import annotations

-from pyunicore import client, credentials
-
 from airflow.hooks.base import BaseHook
+from pyunicore import client
+from pyunicore import credentials


 class UnicoreHook(BaseHook):
@@ -22,28 +22,28 @@ class UnicoreHook(BaseHook):
     def __init__(self, uc_conn_id: str = default_conn_name) -> None:
         super().__init__()
         self.uc_conn_id = uc_conn_id
-        self.conn = None

-    def get_conn(self, overwrite_base_url: str | None = None, overwrite_credential : credentials.Credential | None = None) -> client.Client:
+    def get_conn(
+        self,
+        overwrite_base_url: str | None = None,
+        overwrite_credential: credentials.Credential | None = None,
+    ) -> client.Client:
         """Return a Unicore Client. base_url and credentials may be overwritten."""
-        if self.conn is None or overwrite_base_url is not None or overwrite_credential is not None:  # if not yet generated, or an overwrite attribute is set, create a new one
-            params = self.get_connection(self.uc_conn_id)
-            base_url = params.host
-            credential = credentials.UsernamePassword(params.login, params.password)
-            if overwrite_base_url is not None:
-                base_url = overwrite_base_url
-            if overwrite_credential is not None:
-                credential = overwrite_credential
-            self.conn = client.Client(credential, base_url)
-
-        return self.conn
-
+        self.log.debug(
+            f"Getting connection with id '{self.uc_conn_id}' from the secrets backend. Will be modified with user input for UNICORE."
+        )
+        params = self.get_connection(self.uc_conn_id)
+        base_url = params.host
+        credential = credentials.UsernamePassword(params.login, params.password)
+        if overwrite_base_url is not None:
+            base_url = overwrite_base_url
+        if overwrite_credential is not None:
+            credential = overwrite_credential
+        conn = client.Client(credential, base_url)
+        return conn

     def test_connection(self) -> tuple[bool, str]:
         """Test the connection by sending an access_info request"""
-        try:
-            conn = self.get_conn()
-            conn.access_info()
-            return True, "Connection successfully tested"
-        except Exception as e:
-            return False, str(e)
+        conn = self.get_conn()
+        conn.access_info()
+        return True, "Connection successfully tested"
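A short usage sketch for the hook as changed above; the connection id, URL, and credentials below are placeholders, while the calls are the ones from this diff:

```python
# Hypothetical usage of UnicoreHook; "unicore_default", the URL, and the
# username/password are placeholders.
from pyunicore import credentials
from airflow_unicore_integration.hooks.unicore_hooks import UnicoreHook

hook = UnicoreHook(uc_conn_id="unicore_default")
client = hook.get_conn()  # host/login/password come from the Airflow connection

# Per-call overrides, matching the signature introduced in this diff:
client2 = hook.get_conn(
    overwrite_base_url="https://unicore.example.org:8080/SITE/rest/core",
    overwrite_credential=credentials.UsernamePassword("demouser", "demopass"),
)
```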

airflow_unicore_integration/operators/unicore_operators.py
@@ -1,38 +1,73 @@
-from airflow.models.baseoperator import BaseOperator
-from airflow.decorators.base import DecoratedOperator, task_decorator_factory
-from typing import Any, List, Dict
-
-from airflow.utils.context import Context
+import logging
+from typing import Any
+from typing import Dict
+from typing import List
+from typing import Sequence

 import pyunicore.client as uc_client
 import pyunicore.credentials as uc_credentials
-from airflow_unicore_integration.hooks import unicore_hooks
+from airflow.models.baseoperator import BaseOperator
+from airflow.utils.context import Context

-import logging
+from airflow_unicore_integration.hooks import unicore_hooks

 logger = logging.getLogger(__name__)

-DEFAULT_SCRIPT_NAME = 'default_script_from_job_description'
-DEFAULT_BSS_FILE = 'default_bss_file_upload'
+DEFAULT_SCRIPT_NAME = "default_script_from_job_description"
+DEFAULT_BSS_FILE = "default_bss_file_upload"
+

 class JobDescriptionException(BaseException):
-    """ For errors in the unicore job description that would be generated by the unicore operators."""
+    """For errors in the unicore job description that would be generated by the unicore operators."""
+
     def __init__(self, *args: object) -> None:
         super().__init__(*args)

+
 class UnicoreGenericOperator(BaseOperator):

-    def __init__(self, name: str, application_name : str | None = None, application_version: str | None = None, executable: str | None = None, arguments: List[str] | None = None,
-                 environment: List[str] | None = None, parameters: Dict[str,str | List[str]] | None = None, stdout: str | None = None, stderr: str | None = None, stdin: str | None = None, ignore_non_zero_exit_code: bool | None = None,
-                 user_pre_command: str | None = None, run_user_pre_command_on_login_node: bool | None = None, user_pre_command_ignore_non_zero_exit_code: bool | None = None, user_post_command: str | None = None,
-                 run_user_post_command_on_login_node: bool | None = None, user_post_command_ignore_non_zero_exit_code: bool | None = None, resources: Dict[str, str] | None = None, project: str | None = None,
-                 imports: List[Dict[str,str | List[str]]] | None = None, exports: List[Dict[str,str | List[str]]] | None = None, have_client_stagein: bool | None = None, job_type: str | None = None,
-                 login_node: str | None = None, bss_file: str | None = None, tags: List[str] | None = None, notification: str | None = None, user_email: str | None = None, xcom_output_files: List[str] = ["stdout", "stderr"],
-                 base_url: str | None = None, credential: uc_credentials.Credential | None = None, credential_username: str | None = None, credential_password: str | None = None, credential_token: str | None = None, **kwargs):
+    def __init__(
+        self,
+        name: str,
+        application_name: str | None = None,
+        application_version: str | None = None,
+        executable: str | None = None,
+        arguments: List[str] | None = None,
+        environment: List[str] | None = None,
+        parameters: Dict[str, str | List[str]] | None = None,
+        stdout: str | None = None,
+        stderr: str | None = None,
+        stdin: str | None = None,
+        ignore_non_zero_exit_code: bool | None = None,
+        user_pre_command: str | None = None,
+        run_user_pre_command_on_login_node: bool | None = None,
+        user_pre_command_ignore_non_zero_exit_code: bool | None = None,
+        user_post_command: str | None = None,
+        run_user_post_command_on_login_node: bool | None = None,
+        user_post_command_ignore_non_zero_exit_code: bool | None = None,
+        resources: Dict[str, str] | None = None,
+        project: str | None = None,
+        imports: List[Dict[str, str | Sequence[str]]] | None = None,
+        exports: List[Dict[str, str | List[str]]] | None = None,
+        have_client_stagein: bool | None = None,
+        job_type: str | None = None,
+        login_node: str | None = None,
+        bss_file: str | None = None,
+        tags: List[str] | None = None,
+        notification: str | None = None,
+        user_email: str | None = None,
+        xcom_output_files: List[str] = ["stdout", "stderr"],
+        base_url: str | None = None,
+        credential: uc_credentials.Credential | None = None,
+        credential_username: str | None = None,
+        credential_password: str | None = None,
+        credential_token: str | None = None,
+        **kwargs,
+    ):
         """
-        Initialize a Unicore Job Operator.
-        :param name: The name parameter defines both the airflow task name and the unicore job name.
-        :param xcom_output_files: A list of filenames to be exported from the job directory via x_coms. By default stdout and stderr.
+        Initialize a Unicore Job Operator.
+        :param name: The name parameter defines both the airflow task name and the unicore job name.
+        :param xcom_output_files: A list of filenames to be exported from the job directory via x_coms. By default stdout and stderr.
         :param base_url: The base URL of the UNICOREX server to be used for the unicore client. Overwrites the configured airflow connection.
         :param credential: A unicore Credential to be used for the unicore client. Overwrites the configured user-pass in the airflow connection.
         :param credential_username: Username for the unicore client credentials. Overwrites the configured user in the airflow connection.
@@ -58,7 +93,9 @@ class UnicoreGenericOperator(BaseOperator):
         self.user_pre_command_ignore_non_zero_exit_code = user_pre_command_ignore_non_zero_exit_code
         self.user_post_command = user_post_command
         self.run_user_post_command_on_login_node = run_user_post_command_on_login_node
-        self.user_post_command_ignore_non_zero_exit_code = user_post_command_ignore_non_zero_exit_code
+        self.user_post_command_ignore_non_zero_exit_code = (
+            user_post_command_ignore_non_zero_exit_code
+        )
         self.resources = resources
         self.project = project
         self.imports = imports
@@ -85,54 +122,61 @@ class UnicoreGenericOperator(BaseOperator):
         """
         Does some basic validation and checks if a proper job description can be generated. Raises a JobDescriptionException if not.
         """
-        # check for some errors in the parameters for creating the unicore job
+        # check for some errors in the parameters for creating the unicore job

         # first check if an application or executable has been set
-        if self.application_name is None and self.executable is None:  # TODO check if executable is required if bss_file is given
+        if (
+            self.application_name is None and self.executable is None
+        ):  # TODO check if executable is required if bss_file is given
             raise JobDescriptionException
-
+
         # if bss_file is set, we need an executable
         if self.bss_file is not None:
             if self.executable is None and self.application_name is not None:
                 raise JobDescriptionException
             # TODO validate BSS file?
             logger.info("using bss file")
-
+
         if self.credential_token is not None and self.credential is None:
             logger.info("using provided oidc token")
             self.credential = uc_credentials.OIDCToken(token=self.credential_token)

-        if self.credential_username is not None and self.credential_password is not None and self.credential is None:
+        if (
+            self.credential_username is not None
+            and self.credential_password is not None
+            and self.credential is None
+        ):
             logger.info("using provided user/pass credentials")
-            self.credential = uc_credentials.UsernamePassword(username=self.credential_username, password=self.credential_password)
-
-
-    def get_job_description(self) -> dict[str,Any]:
+            self.credential = uc_credentials.UsernamePassword(
+                username=self.credential_username, password=self.credential_password
+            )
+
+    def get_job_description(self) -> dict[str, Any]:
         """Generates the job description to be submitted to the unicore server."""
         job_description_dict: Dict = {}

         # now add the various simple string attribute fragments to the list, when they are not None
         if self.name is not None:
             job_description_dict["Name"] = self.name
-
+
         if self.application_name is not None:
             job_description_dict["ApplicationName"] = self.application_name
-
+
         if self.application_version is not None:
             job_description_dict["ApplicationVersion"] = self.application_version
-
+
         if self.executable is not None:
             job_description_dict["Executable"] = self.executable

         if self.arguments is not None:
             job_description_dict["Arguments"] = self.arguments
-
+
         if self.environment is not None:
             job_description_dict["Environment"] = self.environment

         if self.parameters is not None:
             job_description_dict["Parameters"] = self.parameters
-
+
         if self.stdout is not None:
             job_description_dict["Stdout"] = self.stdout
@@ -144,24 +188,32 @@ class UnicoreGenericOperator(BaseOperator):

         if self.ignore_non_zero_exit_code is not None:
             job_description_dict["IgnoreNonZeroExitCode"] = self.ignore_non_zero_exit_code
-
+
         if self.user_pre_command is not None:
             job_description_dict["User precommand"] = self.user_pre_command

         if self.run_user_pre_command_on_login_node is not None:
-            job_description_dict["RunUserPrecommandOnLoginNode"] = self.run_user_pre_command_on_login_node
-
+            job_description_dict["RunUserPrecommandOnLoginNode"] = (
+                self.run_user_pre_command_on_login_node
+            )
+
         if self.user_pre_command_ignore_non_zero_exit_code is not None:
-            job_description_dict["UserPrecommandIgnoreNonZeroExitCode"] = self.user_pre_command_ignore_non_zero_exit_code
+            job_description_dict["UserPrecommandIgnoreNonZeroExitCode"] = (
+                self.user_pre_command_ignore_non_zero_exit_code
+            )

         if self.user_post_command is not None:
             job_description_dict["User postcommand"] = self.user_post_command

         if self.run_user_post_command_on_login_node is not None:
-            job_description_dict["RunUserPostcommandOnLoginNode"] = self.run_user_post_command_on_login_node
+            job_description_dict["RunUserPostcommandOnLoginNode"] = (
+                self.run_user_post_command_on_login_node
+            )

         if self.user_post_command_ignore_non_zero_exit_code is not None:
-            job_description_dict["UserPostcommandIgnoreNonZeroExitCode"] = self.user_post_command_ignore_non_zero_exit_code
+            job_description_dict["UserPostcommandIgnoreNonZeroExitCode"] = (
+                self.user_post_command_ignore_non_zero_exit_code
+            )

         if self.resources is not None:
             job_description_dict["Resources"] = self.resources
@@ -171,12 +223,12 @@ class UnicoreGenericOperator(BaseOperator):

         if self.imports is not None:
             job_description_dict["Imports"] = self.imports
-
+
         if self.exports is not None:
             job_description_dict["Exports"] = self.exports

         if self.have_client_stagein is not None:
-            job_description_dict["haveClientStageIn"] =self.have_client_stagein
+            job_description_dict["haveClientStageIn"] = self.have_client_stagein

         if self.job_type is not None:
             job_description_dict["Job type"] = self.job_type
@@ -197,7 +249,7 @@ class UnicoreGenericOperator(BaseOperator):
             job_description_dict["Tags"] = self.tags

         return job_description_dict
-
+
     def get_uc_client(self, uc_conn_id: str | None = None) -> uc_client.Client:
         """Return a unicore client for submitting the job. Will use the provided base_url and credentials if possible, else it will use the default unicore connection from airflow."""
         if self.base_url is not None and self.credential is not None:
@@ -207,7 +259,7 @@ class UnicoreGenericOperator(BaseOperator):
         else:
             hook = unicore_hooks.UnicoreHook(uc_conn_id=uc_conn_id)
         return hook.get_conn(overwrite_base_url=self.base_url, overwrite_credential=self.credential)
-
+
     def execute_async(self, context: Context) -> Any:
         """Submits the job and returns the job object without waiting for it to finish."""
         client = self.get_uc_client()
@@ -216,35 +268,41 @@ class UnicoreGenericOperator(BaseOperator):

     def execute(self, context: Context) -> Any:
         """
-        Submits the job description to the unicore server.
+        Submits the job description to the unicore server.
         After the job is finished (failed or successful), some basic output such as logs, status messages, the job id or file content will be retrieved and returned via x_coms or airflow logs.
         The exit code of this function is the exit code of the unicore job, to ensure proper handling of failed or aborted jobs.
         """
-        import logging
-        from pyunicore.client import JobStatus, Job
+        import logging
+
+        from pyunicore.client import Job
+        from pyunicore.client import JobStatus
+
         logger = logging.getLogger(__name__)
-
-        job: Job = self.execute_async(context)  # TODO depending on params this may spawn multiple jobs -> in those cases, e.g. output needs to be handled differently
+
+        job: Job = self.execute_async(
+            context
+        )  # TODO depending on params this may spawn multiple jobs -> in those cases, e.g. output needs to be handled differently
         logger.debug(f"Waiting for unicore job {job}")
-        job.poll() # wait for the job to finish
+        job.poll()  # wait for the job to finish

-        task_instance = context['task_instance']
+        task_instance = context["task_instance"]

-
         task_instance.xcom_push(key="status_message", value=job.properties["statusMessage"])
         task_instance.xcom_push(key="log", value=job.properties["log"])
         for line in job.properties["log"]:
             logger.info(f"UNICORE LOGS: {line}")
-
+
         if job.status is not JobStatus.SUCCESSFUL:
             from airflow.exceptions import AirflowFailException
-            logger.error(f"Unicore job not successful. Job state is {job.status}. Aborting this task.")
-            raise AirflowFailException

+            logger.error(
+                f"Unicore job not successful. Job state is {job.status}. Aborting this task."
+            )
+            raise AirflowFailException

         work_dir = job.working_dir

-        content = work_dir.contents()['content']
+        content = work_dir.contents()["content"]
         task_instance.xcom_push(key="workdir_content", value=content)

         for filename in content.keys():
@@ -257,14 +315,15 @@ class UnicoreGenericOperator(BaseOperator):
                 bss_submit_text = work_dir.stat(filename).raw().read().decode("utf-8")
                 task_instance.xcom_push(key="BSS_SUBMIT", value=bss_submit_text)
                 continue
-
+
         from requests.exceptions import HTTPError
+
         for file in self.xcom_output_files:
             try:
                 logger.debug(f"Retrieving file {file} from workdir.")
                 remote_file = work_dir.stat(file)
                 content = remote_file.raw().read().decode("utf-8")
-                task_instance.xcom_push(key=file,value=content)
+                task_instance.xcom_push(key=file, value=content)
             except HTTPError as http_error:
                 logger.error(f"Error while retrieving file {file} from workdir.", http_error)
                 continue
@@ -274,51 +333,63 @@ class UnicoreGenericOperator(BaseOperator):
         exit_code = job.properties["exitCode"]
         return exit_code

+
 class UnicoreScriptOperator(UnicoreGenericOperator):
     """
     This operator uploads and submits a script to the unicore server as a job. Behaviour and parameters are otherwise the same as the UnicoreGenericOperator.
     """
+
     def __init__(self, name: str, script_content: str, **kwargs):
         """
         :param script_content: the content of the script to be executed as a string
         """
         super().__init__(name=name, executable=DEFAULT_SCRIPT_NAME, **kwargs)
-        lines = script_content.split('\n')
-        script_stagein = {
-            "To": DEFAULT_SCRIPT_NAME,
-            "Data": lines
-        }
+        lines = script_content.split("\n")
+        script_stagein = {"To": DEFAULT_SCRIPT_NAME, "Data": lines}
         if self.imports is not None:
             self.imports.append(script_stagein)
         else:
             self.imports = [script_stagein]

+
 class UnicoreBSSOperator(UnicoreGenericOperator):
     """
     This operator uploads and submits a BSS-script to the unicore server as a job. Behaviour and parameters are otherwise the same as the UnicoreGenericOperator.
     """
+
     def __init__(self, name: str, bss_file_content: str, **kwargs):
         """
         :param bss_file_content: the content of the BSS-script to be executed as a string
         """
-        super().__init__(name=name, bss_file=DEFAULT_BSS_FILE, job_type="raw", xcom_output_files=[], **kwargs)
-        lines = bss_file_content.split('\n')
-        bss_stagein = {
-            "To": DEFAULT_BSS_FILE,
-            "Data": lines
-        }
+        super().__init__(name=name, **kwargs)
+        lines = bss_file_content.split("\n")
+        bss_stagein = {"To": DEFAULT_BSS_FILE, "Data": lines}
         if self.imports is not None:
             self.imports.append(bss_stagein)
         else:
             self.imports = [bss_stagein]

+
 class UnicoreExecutableOperator(UnicoreGenericOperator):
     """A simplified operator for just executing an executable. Still supports all parameters of the UnicoreGenericOperator."""
-    def __init__(self, name: str, executable: str, xcom_output_files : List[str] = ["stdout","stderr"], **kwargs) -> None:
-        super().__init__(name=name, executable=executable, xcom_output_files=xcom_output_files, **kwargs)
+
+    def __init__(
+        self,
+        name: str,
+        executable: str,
+        xcom_output_files: List[str] = ["stdout", "stderr"],
+        **kwargs,
+    ) -> None:
+        super().__init__(
+            name=name,
+            executable=executable,
+            xcom_output_files=xcom_output_files,
+            **kwargs,
+        )
+

 class UnicoreDateOperator(UnicoreExecutableOperator):
     """An operator for executing the 'date' executable. Mostly for testing purposes. Still supports all parameters of the UnicoreGenericOperator."""

-    def __init__(self, name: str, **kwargs) -> None:
-        super().__init__(name=name, executable='date',**kwargs)

+    def __init__(self, name: str, **kwargs) -> None:
+        super().__init__(name=name, executable="date", **kwargs)
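Taken together, the operators above can be used in a DAG roughly as follows; the dag_id and script content are placeholders, and per the constructor docstring `name` doubles as the airflow task name:

```python
# Hypothetical DAG using the operators from this diff; ids and contents are
# placeholders, the operator parameters are the ones defined above.
from airflow import DAG
from airflow_unicore_integration.operators.unicore_operators import (
    UnicoreDateOperator,
    UnicoreScriptOperator,
)

with DAG(dag_id="unicore-example") as dag:
    date_task = UnicoreDateOperator(name="print-date")
    script_task = UnicoreScriptOperator(
        name="hello-script",
        script_content="#!/bin/bash\necho hello from unicore",
    )
    date_task >> script_task
```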

airflow_unicore_integration/util/job.py
@@ -0,0 +1,101 @@
+from typing import Any
+from typing import Dict
+
+from airflow.configuration import conf
+from airflow.executors.workloads import ExecuteTask
+from airflow.models.taskinstancekey import TaskInstanceKey
+
+
+class JobDescriptionGenerator:
+    """
+    A generator class for generating unicore job descriptions that may support different kinds of systems and/or environments.
+    """
+
+    EXECUTOR_CONFIG_PYTHON_ENV_KEY = "python_env"  # full path to a python virtualenv that includes airflow and all required libraries for the task (without the .../bin/activate part)
+    EXECUTOR_CONFIG_RESOURCES = "Resources"  # gets added to the unicore job description
+    EXECUTOR_CONFIG_ENVIRONMENT = "Environment"  # gets added to the unicore job description
+    EXECUTOR_CONFIG_PARAMETERS = "Parameters"  # gets added to the unicore job description
+    EXECUTOR_CONFIG_PROJECT = "Project"  # gets added to the unicore job description
+    EXECUTOR_CONFIG_PRE_COMMANDS = "precommands"  # gets added to the unicore job description
+    EXECUTOR_CONFIG_UNICORE_CONN_KEY = (
+        "unicore_connection_id"  # alternative connection id for the Unicore connection to use
+    )
+    EXECUTOR_CONFIG_UNICORE_SITE_KEY = "unicore_site"  # alternative Unicore site to run at, only required if different from the connection default
+    EXECUTOR_CONFIG_UNICORE_CREDENTIAL_KEY = "unicore_credential"  # alternative unicore credential to use for the job, only required if different from the connection default
+
+    def create_job_description(self, workload: ExecuteTask) -> Dict[str, Any]:
+        raise NotImplementedError()
+
+
+class NaiveJobDescriptionGenerator(JobDescriptionGenerator):
+    """
+    This class generates a naive unicore job that expects a working python env containing airflow and any other required dependencies on the executing system.
+    """
+
+    def create_job_description(self, workload: ExecuteTask) -> Dict[str, Any]:
+        key: TaskInstanceKey = workload.ti.key
+        executor_config = workload.ti.executor_config
+        if not executor_config:
+            executor_config = {}
+        job_descr_dict: Dict[str, Any] = {}
+        # get user config from executor_config
+        user_added_env: Dict[str, str] = executor_config.get(JobDescriptionGenerator.EXECUTOR_CONFIG_ENVIRONMENT, None)  # type: ignore
+        user_added_params: Dict[str, str] = executor_config.get(JobDescriptionGenerator.EXECUTOR_CONFIG_PARAMETERS, None)  # type: ignore
+        user_added_project: str = executor_config.get(JobDescriptionGenerator.EXECUTOR_CONFIG_PROJECT, None)  # type: ignore
+        user_added_resources: Dict[str, str] = executor_config.get(JobDescriptionGenerator.EXECUTOR_CONFIG_RESOURCES, None)  # type: ignore
+        user_added_pre_commands: list[str] = executor_config.get(JobDescriptionGenerator.EXECUTOR_CONFIG_PRE_COMMANDS, [])  # type: ignore
+        user_defined_python_env: str = workload.ti.executor_config.get(JobDescriptionGenerator.EXECUTOR_CONFIG_PYTHON_ENV_KEY, None)  # type: ignore
+        # get the local dag path from cmd and fix the dag path in the arguments
+        dag_rel_path = str(workload.dag_rel_path)
+        if dag_rel_path.startswith("DAG_FOLDER"):
+            dag_rel_path = dag_rel_path[10:]
+        local_dag_path = conf.get("core", "DAGS_FOLDER") + "/" + dag_rel_path
+        base_url = conf.get("api", "base_url", fallback="/")
+        default_execution_api_server = f"{base_url.rstrip('/')}/execution/"
+        server = conf.get("core", "execution_api_server_url", fallback=default_execution_api_server)
+
+        # check which python virtualenv to use
+        if user_defined_python_env:
+            python_env = user_defined_python_env
+        else:
+            python_env = conf.get("unicore.executor", "DEFAULT_ENV")
+        # prepare the dag file to be uploaded via unicore
+        dag_file = open(local_dag_path)
+        dag_content = dag_file.readlines()
+        dag_import = {"To": dag_rel_path, "Data": dag_content}
+        worker_script_import = {
+            "To": "run_task_via_supervisor.py",
+            "From": "https://gist.githubusercontent.com/cboettcher/3f1101a1d1b67e7944d17c02ecd69930/raw/6da9ec16ba598ddda9cf288900498fab5e226788/run_task_via_supervisor.py",
+        }
+        # start filling the actual job description
+        job_descr_dict["Name"] = f"{key.dag_id} - {key.task_id} - {key.run_id} - {key.try_number}"
+        job_descr_dict["Executable"] = (
+            "python"  # TODO may require a module load to be set up for some systems
+        )
+        job_descr_dict["Arguments"] = [
+            "run_task_via_supervisor.py",
+            f"--json-string '{workload.model_dump_json()}'",
+        ]
+        job_descr_dict["Environment"] = {
+            "AIRFLOW__CORE__EXECUTION_API_SERVER_URL": server,
+            "AIRFLOW__CORE__DAGS_FOLDER": "./",
+            "AIRFLOW__LOGGING__LOGGING_LEVEL": "DEBUG",
+            "AIRFLOW__CORE__EXECUTOR": "LocalExecutor,airflow_unicore_integration.executors.unicore_executor.UnicoreExecutor",
+        }
+        user_added_pre_commands.append(f"source {python_env}/bin/activate")
+        job_descr_dict["User precommand"] = ";".join(user_added_pre_commands)
+        job_descr_dict["RunUserPrecommandOnLoginNode"] = (
+            "false"  # the precommand activates the python env, which can also be done on the compute node right before running the job
+        )
+        job_descr_dict["Imports"] = [dag_import, worker_script_import]
+        # add user defined options to the description
+        if user_added_env:
+            job_descr_dict["Environment"].update(user_added_env)
+        if user_added_params:
+            job_descr_dict["Parameters"] = user_added_params
+        if user_added_project:
+            job_descr_dict["Project"] = user_added_project
+        if user_added_resources:
+            job_descr_dict["Resources"] = user_added_resources
+
+        return job_descr_dict
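For illustration, a task-level `executor_config` that exercises the keys defined above might look like the following; the keys are the `EXECUTOR_CONFIG_*` constants from this file, every value is a placeholder:

```python
# Hypothetical executor_config for a task run by the UnicoreExecutor;
# all values below are placeholders.
executor_config = {
    "python_env": "/path/to/airflow-venv",          # EXECUTOR_CONFIG_PYTHON_ENV_KEY
    "Resources": {"Queue": "batch", "Nodes": "1"},  # EXECUTOR_CONFIG_RESOURCES
    "Project": "my-hpc-project",                    # EXECUTOR_CONFIG_PROJECT
    "precommands": ["module load Python"],          # EXECUTOR_CONFIG_PRE_COMMANDS
    "unicore_connection_id": "my-unicore-conn",     # EXECUTOR_CONFIG_UNICORE_CONN_KEY
}
```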

airflow_unicore_integration-0.1.0.dist-info/METADATA
@@ -1,26 +1,39 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.4
 Name: airflow-unicore-integration
-Version: 0.0.5
+Version: 0.1.0
 Summary: Running Unicore Jobs from airflow DAGs.
 Author-email: Christian Böttcher <c.boettcher@fz-juelich.de>
+License-Expression: BSD-3-Clause
 Project-URL: Homepage, https://github.com/UNICORE-EU/airflow-unicore-integration
 Project-URL: Issues, https://github.com/UNICORE-EU/airflow-unicore-integration/issues
 Classifier: Development Status :: 4 - Beta
 Classifier: Framework :: Apache Airflow :: Provider
 Classifier: Programming Language :: Python :: 3
-Classifier: License :: OSI Approved :: BSD License
 Classifier: Operating System :: OS Independent
-Requires-Python: >=3.10
+Requires-Python: >=3.9
 Description-Content-Type: text/x-rst
 License-File: LICENSE
-Requires-Dist: pyunicore >=1.0.0
-Requires-Dist: apache-airflow ==2.8.4
+Requires-Dist: pyunicore>=1.0.0
+Requires-Dist: apache-airflow>=3.0.0
+Dynamic: license-file

 ===========================
 Unicore Airflow Integration
 ===========================


+|Generic badge|
+
+.. |Generic badge| image:: https://github.com/UNICORE-EU/airflow-unicore-integration/actions/workflows/publish-to-pypi.yml/badge.svg
+   :target: https://github.com/UNICORE-EU/airflow-unicore-integration/actions/workflows/publish-to-pypi.yml
+
+This project integrates `UNICORE <https://github.com/UNICORE-EU>`_ and `Apache Airflow <https://airflow.apache.org/>`_.
+UNICORE is a software suite that, among other functions, provides seamless access to high-performance compute and data resources.
+Airflow is a platform to programmatically author, schedule and monitor workflows.
+
+In its current state, this project provides a set of airflow `operators <https://airflow.apache.org/docs/apache-airflow/stable/core-concepts/operators.html>`_, which can be used as part of airflow workflows to submit jobs to Unicore.
+The UnicoreExecutor only offers experimental support for airflow 3 so far. Further support is currently being worked on.
+
 ---------------------------
 Using the Unicore Operators
 ---------------------------
@@ -47,7 +60,7 @@ exports List(exports) None
 For imports and exports go `here <https://unicore-docs.readthedocs.io/en/latest/user-docs/rest-api/job-description/index.html#importing-files-into-the-job-workspace>`_ for details.


-The ``UnicoreGenericOperator`` supports the following additional parameters:
+The ``UnicoreGenericOperator`` supports the following additional parameters:

 ======================= ======================= =========================================== ====================
 parameter name          type                    default                                     description
@@ -98,7 +111,7 @@ Behaviour on Errors and Success
 The Unicore Operators do not do a lot of error and exception handling, and mostly just forward any problems to be handled by airflow.
 All of the Unicore logic is handled by the `pyunicore library <https://github.com/HumanBrainProject/pyunicore>`_.

-While some validation of the resulting Unicore job description is done automatically, it may still be possible to build an invalid job description with the operators.
+While some validation of the resulting Unicore job description is done automatically, it may still be possible to build an invalid job description with the operators.
 This may lead to a submission failure with Unicore. In this case, an exception is thrown to be handled by airflow.
@@ -128,10 +141,10 @@ There are some example DAGs in this repository under ``project-dir/dags``.
 - ``unicore-test-2.py`` has some basic examples for the generic operator.
 - ``unicore-test-3.py`` also includes script-operator examples.
 - ``unicore-test-4.py`` has some examples with more arguments.
-- ``unicore-test-bss.py`` shows how bss submission can be done (very simple example).
-- ``unicore-test-credentials.py`` demonstrates that not only the credentials from the airflow connections backend can be used, but they can also be provided in the constructor of the o`perator.
-- ``unicore-test-import-export.py`` gives shprt examples for the imports and exports usage.
-
+- ``unicore-test-bss.py`` shows how bss submission can be done (very simple example).
+- ``unicore-test-credentials.py`` demonstrates that not only the credentials from the airflow connections backend can be used, but they can also be provided in the constructor of the operator.
+- ``unicore-test-import-export.py`` gives short examples for the imports and exports usage.
+

 -----------------
 Setup testing env

airflow_unicore_integration-0.1.0.dist-info/RECORD
@@ -0,0 +1,16 @@
+airflow_unicore_integration/__init__.py,sha256=sibcOlQKBCUKfTbZlAl0L7r1d2nxf0nGAQys612vzeA,429
+airflow_unicore_integration/executors/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+airflow_unicore_integration/executors/run_task_via_supervisor.py,sha256=3ErgPf-Oy3B4Di5yNXhhPkaojIJykvCxMZ9MlKSYPI8,2756
+airflow_unicore_integration/executors/unicore_executor.py,sha256=f2PKj66tZlWb39xF5YacEBoZ2-eZ_JR9pnKQwNHAybc,5619
+airflow_unicore_integration/hooks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+airflow_unicore_integration/hooks/unicore_hooks.py,sha256=JjcjogWtN1xveagpkraQuYOdXjkp2lSnEdQc0waqhU4,1662
+airflow_unicore_integration/operators/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+airflow_unicore_integration/operators/unicore_operators.py,sha256=xYX1t_QzjpqHZDoEu6jJNNXVmBEnjIlApFWvgYoYoB0,16790
+airflow_unicore_integration/policies/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+airflow_unicore_integration/util/job.py,sha256=AqbtV0rOCzvpW_7eqwMuMvUzIn0BDbuZVts-ju49zJE,5868
+airflow_unicore_integration-0.1.0.dist-info/licenses/LICENSE,sha256=hZ5ouAedeNr8ClHrQE-RLsgMsARcmv3kSZz7tE2BTJE,1526
+airflow_unicore_integration-0.1.0.dist-info/METADATA,sha256=K5ks_88YYhpL8L8s23tNH7l-v0BMQF_MdPydk11NntA,11188
+airflow_unicore_integration-0.1.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+airflow_unicore_integration-0.1.0.dist-info/entry_points.txt,sha256=PzEfCLYLSawjiYR-HNBzw8-YGfJxs1nPBULevgBQjoY,147
+airflow_unicore_integration-0.1.0.dist-info/top_level.txt,sha256=j45X-uIuOk3oL78iwlpHakMWtUkg__B7zUlJLwmZx6w,28
+airflow_unicore_integration-0.1.0.dist-info/RECORD,,

airflow_unicore_integration-0.1.0.dist-info/WHEEL
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (70.1.1)
+Generator: setuptools (80.9.0)
 Root-Is-Purelib: true
 Tag: py3-none-any

airflow_unicore_integration-0.1.0.dist-info/entry_points.txt
@@ -1,2 +1,5 @@
+[airflow.policy]
+_ = airflow_unicore_integration.policies
+
 [apache_airflow_provider]
 provider_info = airflow_unicore_integration:get_provider_info
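For context, the `[apache_airflow_provider]` entry point above is what Airflow's provider discovery loads. A minimal sketch of resolving it by hand with the standard library (Python 3.10+ `group=` keyword); Airflow's own provider manager does this more robustly:

```python
# Illustrative only: resolve the provider entry point declared above.
from importlib.metadata import entry_points

for ep in entry_points(group="apache_airflow_provider"):
    get_info = ep.load()  # -> airflow_unicore_integration.get_provider_info
    print(ep.name, get_info()["package-name"])
```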

airflow_unicore_integration-0.1.0.dist-info/licenses/LICENSE
@@ -26,4 +26,4 @@ DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

airflow_unicore_integration-0.0.5.dist-info/RECORD
@@ -1,11 +0,0 @@
-airflow_unicore_integration/__init__.py,sha256=bh8G7mutfVy8umYxhoMZLBLufNxZCp44RxekRw2y4cw,395
-airflow_unicore_integration/hooks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-airflow_unicore_integration/hooks/unicore_hooks.py,sha256=3sXaVYNhd6w-DdWS0VmtM0e037lUcRvUOuNRWz_bCYk,1768
-airflow_unicore_integration/operators/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-airflow_unicore_integration/operators/unicore_operators.py,sha256=1o9hhEJTcrSYuQ4AexWLXodylZwNerP9zpqgakJ7ZIk,16445
-airflow_unicore_integration-0.0.5.dist-info/LICENSE,sha256=FgrHcXlmWUhvWaEmE1-OJIR_mJIwlSBBlvPy2PLh83I,1525
-airflow_unicore_integration-0.0.5.dist-info/METADATA,sha256=kvCgHJkbdYSZxAzIwNmRLwksFKt6x4Io6UNrdj7E0Bo,10242
-airflow_unicore_integration-0.0.5.dist-info/WHEEL,sha256=mguMlWGMX-VHnMpKOjjQidIo1ssRlCFu4a4mBpz1s2M,91
-airflow_unicore_integration-0.0.5.dist-info/entry_points.txt,sha256=bmk1b2a4vYfZ2P4fbcPVSOLEThgKHSHftgjR82XiG6A,88
-airflow_unicore_integration-0.0.5.dist-info/top_level.txt,sha256=j45X-uIuOk3oL78iwlpHakMWtUkg__B7zUlJLwmZx6w,28
-airflow_unicore_integration-0.0.5.dist-info/RECORD,,