airflow-unicore-integration 0.0.5__tar.gz → 0.1.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {airflow_unicore_integration-0.0.5 → airflow_unicore_integration-0.1.1}/LICENSE +1 -1
- {airflow_unicore_integration-0.0.5 → airflow_unicore_integration-0.1.1}/PKG-INFO +24 -11
- airflow_unicore_integration-0.0.5/src/airflow_unicore_integration.egg-info/PKG-INFO → airflow_unicore_integration-0.1.1/README.rst +18 -24
- {airflow_unicore_integration-0.0.5 → airflow_unicore_integration-0.1.1}/pyproject.toml +33 -6
- {airflow_unicore_integration-0.0.5 → airflow_unicore_integration-0.1.1}/src/airflow_unicore_integration/__init__.py +6 -4
- airflow_unicore_integration-0.1.1/src/airflow_unicore_integration/executors/run_task_via_supervisor.py +85 -0
- airflow_unicore_integration-0.1.1/src/airflow_unicore_integration/executors/unicore_executor.py +123 -0
- airflow_unicore_integration-0.1.1/src/airflow_unicore_integration/hooks/unicore_hooks.py +49 -0
- airflow_unicore_integration-0.1.1/src/airflow_unicore_integration/operators/__init__.py +0 -0
- {airflow_unicore_integration-0.0.5 → airflow_unicore_integration-0.1.1}/src/airflow_unicore_integration/operators/unicore_operators.py +145 -74
- airflow_unicore_integration-0.1.1/src/airflow_unicore_integration/policies/__init__.py +0 -0
- airflow_unicore_integration-0.1.1/src/airflow_unicore_integration/util/job.py +101 -0
- airflow_unicore_integration-0.0.5/README.rst → airflow_unicore_integration-0.1.1/src/airflow_unicore_integration.egg-info/PKG-INFO +38 -7
- {airflow_unicore_integration-0.0.5 → airflow_unicore_integration-0.1.1}/src/airflow_unicore_integration.egg-info/SOURCES.txt +6 -1
- {airflow_unicore_integration-0.0.5 → airflow_unicore_integration-0.1.1}/src/airflow_unicore_integration.egg-info/entry_points.txt +3 -0
- airflow_unicore_integration-0.1.1/src/airflow_unicore_integration.egg-info/requires.txt +2 -0
- airflow_unicore_integration-0.0.5/src/airflow_unicore_integration/hooks/unicore_hooks.py +0 -49
- airflow_unicore_integration-0.0.5/src/airflow_unicore_integration.egg-info/requires.txt +0 -2
- {airflow_unicore_integration-0.0.5 → airflow_unicore_integration-0.1.1}/setup.cfg +0 -0
- {airflow_unicore_integration-0.0.5/src/airflow_unicore_integration/hooks → airflow_unicore_integration-0.1.1/src/airflow_unicore_integration/executors}/__init__.py +0 -0
- {airflow_unicore_integration-0.0.5/src/airflow_unicore_integration/operators → airflow_unicore_integration-0.1.1/src/airflow_unicore_integration/hooks}/__init__.py +0 -0
- {airflow_unicore_integration-0.0.5 → airflow_unicore_integration-0.1.1}/src/airflow_unicore_integration.egg-info/dependency_links.txt +0 -0
- {airflow_unicore_integration-0.0.5 → airflow_unicore_integration-0.1.1}/src/airflow_unicore_integration.egg-info/top_level.txt +0 -0
{airflow_unicore_integration-0.0.5 → airflow_unicore_integration-0.1.1}/LICENSE

@@ -26,4 +26,4 @@ DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
{airflow_unicore_integration-0.0.5 → airflow_unicore_integration-0.1.1}/PKG-INFO

@@ -1,26 +1,39 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.4
 Name: airflow-unicore-integration
-Version: 0.0.5
+Version: 0.1.1
 Summary: Running Unicore Jobs from airflow DAGs.
 Author-email: Christian Böttcher <c.boettcher@fz-juelich.de>
+License-Expression: BSD-3-Clause
 Project-URL: Homepage, https://github.com/UNICORE-EU/airflow-unicore-integration
 Project-URL: Issues, https://github.com/UNICORE-EU/airflow-unicore-integration/issues
 Classifier: Development Status :: 4 - Beta
 Classifier: Framework :: Apache Airflow :: Provider
 Classifier: Programming Language :: Python :: 3
-Classifier: License :: OSI Approved :: BSD License
 Classifier: Operating System :: OS Independent
-Requires-Python: >=3.10
+Requires-Python: >=3.9
 Description-Content-Type: text/x-rst
 License-File: LICENSE
 Requires-Dist: pyunicore>=1.0.0
-Requires-Dist: apache-airflow==2.8.4
+Requires-Dist: apache-airflow>=3.0.0
+Dynamic: license-file
 
 ===========================
 Unicore Airflow Integration
 ===========================
 
 
+|Generic badge|
+
+.. |Generic badge| image:: https://github.com/UNICORE-EU/airflow-unicore-integration/actions/workflows/publish-to-pypi.yml/badge.svg
+   :target: https://github.com/UNICORE-EU/airflow-unicore-integration/actions/workflows/publish-to-pypi.yml
+
+This project integrates `UNICORE <https://github.com/UNICORE-EU>`_ and `Apache Airflow <https://airflow.apache.org/>`_.
+UNICORE is a software suite that, among other functions, provides seamless access to high-performance compute and data resources.
+Airflow is a platform to programmatically author, schedule and monitor workflows.
+
+In the current state, this projects provides a set of airflow `operators <https://airflow.apache.org/docs/apache-airflow/stable/core-concepts/operators.html>`_, which can be used as part of airflow workflows to submit jobs to Unicore.
+The UnicoreExecutor only offers experimental support for airflow 3 so far. Further support is currently being worked on.
+
 ---------------------------
 Using the Unicore Operators
 ---------------------------

@@ -47,7 +60,7 @@ exports List(exports) None
 For imports and exports go `here <https://unicore-docs.readthedocs.io/en/latest/user-docs/rest-api/job-description/index.html#importing-files-into-the-job-workspace>`_ for details.
 
 
-The ``UnicoreGenericOperator`` supports the following additional parameters:
+The ``UnicoreGenericOperator`` supports the following additional parameters:
 
 ======================= ======================= =========================================== ====================
 parameter name          type                    default                                     description

@@ -98,7 +111,7 @@ Behaviour on Errors and Success
 The Unicore Operators do not do a lot of error and exception handling, and mostly just forward any problems to be handled by airflow.
 All of the Unicore logic is handled by the `pyunicore library <https://github.com/HumanBrainProject/pyunicore>`_.
 
-While some validation of the resulting Unicore job description is done automatically, it may still be possible to build an invalid job description with the operators.
+While some validation of the resulting Unicore job description is done automatically, it may still be possible to build an invalid job description with the operators.
 This may lead to a submission failure with Unicore. In this case, an exception is thrown to be handled by airflow.
 
 

@@ -128,10 +141,10 @@ There are some example DAGs in this repository under ``project-dir/dags``.
 - ``unicore-test-2.py`` has some basic examples for the generic operator.
 - ``unicore-test-3.py`` also includes script-operator examples.
 - ``unicore-test-4.py`` has some examples with more arguments.
-- ``unicore-test-bss.py`` shows how bss submission can be done (very simple example).
-- ``unicore-test-credentials.py`` demonstrates that not only the credentials from the airflow connections backend can be used, but they can also be provided in the constructor of the
-- ``unicore-test-import-export.py`` gives
-
+- ``unicore-test-bss.py`` shows how bss submission can be done (very simple example).
+- ``unicore-test-credentials.py`` demonstrates that not only the credentials from the airflow connections backend can be used, but they can also be provided in the constructor of the operator.
+- ``unicore-test-import-export.py`` gives short examples for the imports and exports usage.
+
 
 -----------------
 Setup testing env
airflow_unicore_integration-0.0.5/src/airflow_unicore_integration.egg-info/PKG-INFO → airflow_unicore_integration-0.1.1/README.rst

@@ -1,26 +1,20 @@
-Metadata-Version: 2.1
-Name: airflow-unicore-integration
-Version: 0.0.5
-Summary: Running Unicore Jobs from airflow DAGs.
-Author-email: Christian Böttcher <c.boettcher@fz-juelich.de>
-Project-URL: Homepage, https://github.com/UNICORE-EU/airflow-unicore-integration
-Project-URL: Issues, https://github.com/UNICORE-EU/airflow-unicore-integration/issues
-Classifier: Development Status :: 4 - Beta
-Classifier: Framework :: Apache Airflow :: Provider
-Classifier: Programming Language :: Python :: 3
-Classifier: License :: OSI Approved :: BSD License
-Classifier: Operating System :: OS Independent
-Requires-Python: >=3.10
-Description-Content-Type: text/x-rst
-License-File: LICENSE
-Requires-Dist: pyunicore>=1.0.0
-Requires-Dist: apache-airflow==2.8.4
-
 ===========================
 Unicore Airflow Integration
 ===========================
 
 
+|Generic badge|
+
+.. |Generic badge| image:: https://github.com/UNICORE-EU/airflow-unicore-integration/actions/workflows/publish-to-pypi.yml/badge.svg
+   :target: https://github.com/UNICORE-EU/airflow-unicore-integration/actions/workflows/publish-to-pypi.yml
+
+This project integrates `UNICORE <https://github.com/UNICORE-EU>`_ and `Apache Airflow <https://airflow.apache.org/>`_.
+UNICORE is a software suite that, among other functions, provides seamless access to high-performance compute and data resources.
+Airflow is a platform to programmatically author, schedule and monitor workflows.
+
+In the current state, this projects provides a set of airflow `operators <https://airflow.apache.org/docs/apache-airflow/stable/core-concepts/operators.html>`_, which can be used as part of airflow workflows to submit jobs to Unicore.
+The UnicoreExecutor only offers experimental support for airflow 3 so far. Further support is currently being worked on.
+
 ---------------------------
 Using the Unicore Operators
 ---------------------------

@@ -47,7 +41,7 @@ exports List(exports) None
 For imports and exports go `here <https://unicore-docs.readthedocs.io/en/latest/user-docs/rest-api/job-description/index.html#importing-files-into-the-job-workspace>`_ for details.
 
 
-The ``UnicoreGenericOperator`` supports the following additional parameters:
+The ``UnicoreGenericOperator`` supports the following additional parameters:
 
 ======================= ======================= =========================================== ====================
 parameter name          type                    default                                     description

@@ -98,7 +92,7 @@ Behaviour on Errors and Success
 The Unicore Operators do not do a lot of error and exception handling, and mostly just forward any problems to be handled by airflow.
 All of the Unicore logic is handled by the `pyunicore library <https://github.com/HumanBrainProject/pyunicore>`_.
 
-While some validation of the resulting Unicore job description is done automatically, it may still be possible to build an invalid job description with the operators.
+While some validation of the resulting Unicore job description is done automatically, it may still be possible to build an invalid job description with the operators.
 This may lead to a submission failure with Unicore. In this case, an exception is thrown to be handled by airflow.
 
 

@@ -128,10 +122,10 @@ There are some example DAGs in this repository under ``project-dir/dags``.
 - ``unicore-test-2.py`` has some basic examples for the generic operator.
 - ``unicore-test-3.py`` also includes script-operator examples.
 - ``unicore-test-4.py`` has some examples with more arguments.
-- ``unicore-test-bss.py`` shows how bss submission can be done (very simple example).
-- ``unicore-test-credentials.py`` demonstrates that not only the credentials from the airflow connections backend can be used, but they can also be provided in the constructor of the
-- ``unicore-test-import-export.py`` gives
-
+- ``unicore-test-bss.py`` shows how bss submission can be done (very simple example).
+- ``unicore-test-credentials.py`` demonstrates that not only the credentials from the airflow connections backend can be used, but they can also be provided in the constructor of the operator.
+- ``unicore-test-import-export.py`` gives short examples for the imports and exports usage.
+
 
 -----------------
 Setup testing env
{airflow_unicore_integration-0.0.5 → airflow_unicore_integration-0.1.1}/pyproject.toml

@@ -1,23 +1,24 @@
 [build-system]
 requires = [
-    "setuptools>=61.0"
+    "setuptools>=61.0",
+    "wheel"
 ]
 build-backend = "setuptools.build_meta"
 
 [project]
 name = "airflow-unicore-integration"
-version = "0.0.5"
+version = "0.1.1"
 authors = [
   { name="Christian Böttcher", email="c.boettcher@fz-juelich.de" },
 ]
 description = "Running Unicore Jobs from airflow DAGs."
 readme = "README.rst"
-requires-python = ">=3.10"
+requires-python = ">=3.9"
+license = "BSD-3-Clause"
 classifiers = [
     "Development Status :: 4 - Beta",
     "Framework :: Apache Airflow :: Provider",
     "Programming Language :: Python :: 3",
-    "License :: OSI Approved :: BSD License",
     "Operating System :: OS Independent",
 ]
 

@@ -25,7 +26,7 @@ classifiers = [
 
 dependencies = [
     "pyunicore>=1.0.0",
-    "apache-airflow==2.8.4"
+    "apache-airflow>=3.0.0"
 ]
 
 [project.urls]

@@ -33,4 +34,30 @@ Homepage = "https://github.com/UNICORE-EU/airflow-unicore-integration"
 Issues = "https://github.com/UNICORE-EU/airflow-unicore-integration/issues"
 
 [project.entry-points."apache_airflow_provider"]
-provider_info = "airflow_unicore_integration:get_provider_info"
+provider_info = "airflow_unicore_integration:get_provider_info"
+
+[project.entry-points.'airflow.policy']
+_ = 'airflow_unicore_integration.policies'
+
+[tool.pytest.ini_options]
+pythonpath = [
+    "src"
+]
+
+[tool.black]
+line-length = 100
+
+[tool.flake8]
+max-line-length = 100
+ignore = [
+    "N999",
+    "E501",
+    "W503"
+]
+per-file-ignores = [
+    "__init__.py:F401,E501",
+    "_version.py:E203",
+]
+
+[tool.mypy]
+ignore_missing_imports=true
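The new ``airflow.policy`` entry point advertises ``airflow_unicore_integration.policies`` as a pluggy policy plugin. Note the packaged ``policies/__init__.py`` ships empty in this release; the following is only a sketch of what such a module could eventually contain (the hook body is illustrative, not part of this package):

    # sketch only: airflow_unicore_integration/policies/__init__.py
    from airflow.policies import hookimpl

    @hookimpl
    def task_policy(task):
        # inspect or mutate every task at DAG-parse time,
        # e.g. to route it onto the Unicore executor
        pass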
{airflow_unicore_integration-0.0.5 → airflow_unicore_integration-0.1.1}/src/airflow_unicore_integration/__init__.py

@@ -3,8 +3,10 @@ def get_provider_info():
         "package-name": "airflow-unicore-integration",
         "name": "Unicore",
         "description": "Apache Airflow Unicore provider containing Operators and hooks.",
-        "connection-types": [
-            …
-            …
-            …
+        "connection-types": [
+            {
+                "connection-type": "unicore",
+                "hook-class-name": "airflow_unicore_integration.hooks.unicore_hooks.UnicoreHook",
+            }
+        ],
     }
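Assembled from the hunk above, the full provider-info hook now plausibly reads as follows (the surrounding ``def`` and ``return`` lines sit outside the hunk and are assumed):

    def get_provider_info():
        return {
            "package-name": "airflow-unicore-integration",
            "name": "Unicore",
            "description": "Apache Airflow Unicore provider containing Operators and hooks.",
            "connection-types": [
                {
                    "connection-type": "unicore",
                    "hook-class-name": "airflow_unicore_integration.hooks.unicore_hooks.UnicoreHook",
                }
            ],
        }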
airflow_unicore_integration-0.1.1/src/airflow_unicore_integration/executors/run_task_via_supervisor.py ADDED

@@ -0,0 +1,85 @@
+"""
+Usage:
+
+python run_task_via_supervisor.py [--json-string <workload string> | --json-file <workload filepath>]
+
+"""
+
+import argparse
+import sys
+
+import structlog
+from airflow.configuration import conf
+from airflow.executors import workloads
+from airflow.sdk.execution_time.supervisor import supervise
+from pydantic import TypeAdapter
+from pydantic_core._pydantic_core import ValidationError
+
+log = structlog.get_logger(logger_name=__name__)
+
+
+def execute_workload_locally(workload: workloads.All):
+    if not isinstance(workload, workloads.ExecuteTask):
+        raise ValueError(f"Executor does not know how to handle {type(workload)}")
+
+    base_url = conf.get("api", "base_url", fallback="/")
+    default_execution_api_server = f"{base_url.rstrip('/')}/execution/"
+    server = conf.get("core", "execution_api_server_url", fallback=default_execution_api_server)
+    log.info("Connecting to server:", server=server)
+
+    supervise(
+        # This is the "wrong" ti type, but it duck types the same. TODO: Create a protocol for this.
+        ti=workload.ti,  # type: ignore[arg-type]
+        dag_rel_path=workload.dag_rel_path,
+        bundle_info=workload.bundle_info,
+        token=workload.token,
+        server=server,
+        log_path=workload.log_path,
+        # Include the output of the task to stdout too, so that in process logs can be read from via the
+        # kubeapi as pod logs.
+        subprocess_logs_to_stdout=True,
+    )
+
+
+def main():
+    parser = argparse.ArgumentParser(
+        description="Execute a workload in a Containerised executor using the task SDK."
+    )
+
+    # Create a mutually exclusive group to ensure that only one of the flags is set
+    group = parser.add_mutually_exclusive_group(required=True)
+    group.add_argument(
+        "--json-path",
+        help="Path to the input JSON file containing the execution workload payload.",
+        type=str,
+    )
+    group.add_argument(
+        "--json-string",
+        help="The JSON string itself containing the execution workload payload.",
+        type=str,
+    )
+    args = parser.parse_args()
+
+    decoder = TypeAdapter[workloads.All](workloads.All)
+
+    if args.json_path:
+        try:
+            with open(args.json_path) as file:
+                input_data = file.read()
+            workload = decoder.validate_json(input_data)
+        except OSError as e:
+            log.error("Failed to read file", error=str(e))
+            sys.exit(1)
+
+    elif args.json_string:
+        try:
+            workload = decoder.validate_json(args.json_string)
+        except ValidationError as e:
+            log.error("Failed to parse input JSON string", error=str(e))
+            sys.exit(1)
+
+    execute_workload_locally(workload)
+
+
+if __name__ == "__main__":
+    main()
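Note that the module docstring advertises ``--json-file`` while the parser defines ``--json-path``; the docstring appears to lag behind the code. A hedged sketch of how the executor side could hand a payload to this runner (``workload`` stands for an ``ExecuteTask`` instance; the invocation details are assumptions, not part of the package):

    # sketch: serialize a workload and feed it to the runner script
    import subprocess

    payload = workload.model_dump_json()  # same serialization the UnicoreExecutor uses
    subprocess.run(
        ["python", "run_task_via_supervisor.py", "--json-string", payload],
        check=True,
    )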
airflow_unicore_integration-0.1.1/src/airflow_unicore_integration/executors/unicore_executor.py ADDED

@@ -0,0 +1,123 @@
+"""
+to configure for executor:
+- Connection details for unicore: conn_id AIRFLOW__UNICORE_EXECUTOR__UNICORE_CONN_ID | should be defined, can be skipped if every task provides one
+- location (path) of python virtualenv prepared on hpc system | AIRFLOW__UNICORE_EXECUTOR__DEFAULT_ENV | should be defined, can be skipped if every task provides one
+
+tasks should be allowed to overwrite SITE, CREDENTIALS_*, UNICORE_CONN_ID and DEFAULT_ENV - i.e. everything but the database connection - credentials should be given as a uc_credential object via executor_config
+
+"""
+
+import time
+from typing import Any
+from typing import Dict
+from typing import List
+
+import pyunicore.client as uc_client
+from airflow.configuration import conf
+from airflow.executors.base_executor import BaseExecutor
+from airflow.executors.workloads import All
+from airflow.executors.workloads import ExecuteTask
+from airflow.models.taskinstancekey import TaskInstanceKey
+from airflow.utils.state import TaskInstanceState
+
+from airflow_unicore_integration.hooks import unicore_hooks
+
+from ..util.job import JobDescriptionGenerator
+from ..util.job import NaiveJobDescriptionGenerator
+
+STATE_MAPPINGS: Dict[uc_client.JobStatus, TaskInstanceState] = {
+    uc_client.JobStatus.UNDEFINED: TaskInstanceState.FAILED,
+    uc_client.JobStatus.READY: TaskInstanceState.QUEUED,
+    uc_client.JobStatus.STAGINGIN: TaskInstanceState.QUEUED,
+    uc_client.JobStatus.QUEUED: TaskInstanceState.QUEUED,
+    uc_client.JobStatus.RUNNING: TaskInstanceState.RUNNING,
+    uc_client.JobStatus.STAGINGOUT: TaskInstanceState.RUNNING,
+    uc_client.JobStatus.SUCCESSFUL: TaskInstanceState.SUCCESS,
+    uc_client.JobStatus.FAILED: TaskInstanceState.FAILED,
+}
+
+
+class UnicoreExecutor(BaseExecutor):
+
+    def start(self):
+        self.active_jobs: Dict[TaskInstanceKey, uc_client.Job] = {}
+        self.uc_conn = unicore_hooks.UnicoreHook().get_conn()
+        # TODO get job description generator class and init params from config
+        self.job_descr_generator: JobDescriptionGenerator = NaiveJobDescriptionGenerator()
+
+    def sync(self) -> None:
+        # iterate through task collection and update task/ job status - delete if needed
+        for task, job in list(self.active_jobs.items()):
+            state = STATE_MAPPINGS[job.status]
+            if state == TaskInstanceState.FAILED:
+                self.fail(task)
+                self._forward_unicore_log(task, job)
+                self.active_jobs.pop(task)
+            elif state == TaskInstanceState.SUCCESS:
+                self.success(task)
+                self._forward_unicore_log(task, job)
+                self.active_jobs.pop(task)
+            elif state == TaskInstanceState.RUNNING:
+                self.running_state(task, state)
+
+        return super().sync()
+
+    def _forward_unicore_log(self, task: TaskInstanceKey, job: uc_client.Job) -> List[str]:
+        # TODO retrieve unicore logs from job directory and return
+        return []
+
+    def _get_unicore_client(self, executor_config: dict | None = {}):
+        # TODO fix this only temporary solution
+        return self.uc_conn
+        # END TODO fix this
+        # include client desires from executor_config
+        unicore_conn_id = executor_config.get(  # type: ignore
+            UnicoreExecutor.EXECUTOR_CONFIG_UNICORE_CONN_KEY,
+            conf.get("unicore.executor", "UNICORE_CONN_ID"),
+        )  # task can provide a different unicore connection to use, else airflow-wide default is used
+        self.log.info(f"Using base unicore connection with id '{unicore_conn_id}'")
+        hook = unicore_hooks.UnicoreHook(uc_conn_id=unicore_conn_id)
+        unicore_site = executor_config.get(  # type: ignore
+            UnicoreExecutor.EXECUTOR_CONFIG_UNICORE_SITE_KEY, None
+        )  # task can provide a different site to run at, else default from connetion is used
+        unicore_credential = executor_config.get(  # type: ignore
+            UnicoreExecutor.EXECUTOR_CONFIG_UNICORE_CREDENTIAL_KEY, None
+        )  # task can provide a different credential to use, else default from connection is used
+        return hook.get_conn(
+            overwrite_base_url=unicore_site, overwrite_credential=unicore_credential
+        )
+
+    def _submit_job(self, workload: ExecuteTask):
+        uc_client = self._get_unicore_client(executor_config=workload.ti.executor_config)
+        job_descr = self._create_job_description(workload)
+        self.log.info("Generated job description")
+        self.log.debug(str(job_descr))
+        job = uc_client.new_job(job_descr)
+        self.log.info("Submitted unicore job")
+        self.active_jobs[workload.ti.key] = job
+        return job
+
+    def _create_job_description(self, workload: ExecuteTask) -> Dict[str, Any]:
+        return self.job_descr_generator.create_job_description(workload)
+
+    def queue_workload(self, workload: ExecuteTask | All, session):
+        if not isinstance(workload, ExecuteTask):
+            raise TypeError(f"Don't know how to queue workload of type {type(workload).__name__}")
+
+        # submit job to unicore and add to active_jobs dict for task state management
+        job = self._submit_job(workload)
+        self.active_jobs[workload.ti.key] = job
+
+    def end(self, heartbeat_interval=10) -> None:
+        # wait for current jobs to finish, dont start any new ones
+        while True:
+            self.sync()
+            if not self.active_jobs:
+                break
+            time.sleep(heartbeat_interval)
+
+    def terminate(self):
+        # terminate all jobs
+        for task, job in list(self.active_jobs.items()):
+            job.abort()
+        self.end()
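A minimal sketch of the configuration the executor expects, using the config keys named in the module docstring (all values are placeholders):

    # sketch: environment for a scheduler running the UnicoreExecutor
    import os

    os.environ["AIRFLOW__CORE__EXECUTOR"] = (
        "airflow_unicore_integration.executors.unicore_executor.UnicoreExecutor"
    )
    os.environ["AIRFLOW__UNICORE_EXECUTOR__UNICORE_CONN_ID"] = "uc_default"  # Airflow connection id
    os.environ["AIRFLOW__UNICORE_EXECUTOR__DEFAULT_ENV"] = "/path/to/hpc/virtualenv"  # venv on the HPC system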
airflow_unicore_integration-0.1.1/src/airflow_unicore_integration/hooks/unicore_hooks.py ADDED

@@ -0,0 +1,49 @@
+from __future__ import annotations
+
+from airflow.hooks.base import BaseHook
+from pyunicore import client
+from pyunicore import credentials
+
+
+class UnicoreHook(BaseHook):
+    """
+    Interact with Unicore.
+
+    Creates Unicore Clients from airflow connections.
+
+    :param uc_conn_id: The unicore connection id - default: uc_default
+    """
+
+    conn_name_attr = "uc_conn_id"
+    default_conn_name = "uc_default"
+    conn_type = "unicore"
+    hook_name = "Unicore"
+
+    def __init__(self, uc_conn_id: str = default_conn_name) -> None:
+        super().__init__()
+        self.uc_conn_id = uc_conn_id
+
+    def get_conn(
+        self,
+        overwrite_base_url: str | None = None,
+        overwrite_credential: credentials.Credential | None = None,
+    ) -> client.Client:
+        """Return a Unicore Client. base_url and credentials may be overwritten."""
+        self.log.debug(
+            f"Gettig connection with id '{self.uc_conn_id}' from secrets backend. Will be modified with user input for UNICORE."
+        )
+        params = self.get_connection(self.uc_conn_id)
+        base_url = params.host
+        credential = credentials.UsernamePassword(params.login, params.password)
+        if overwrite_base_url is not None:
+            base_url = overwrite_base_url
+        if overwrite_credential is not None:
+            credential = overwrite_credential
+        conn = client.Client(credential, base_url)
+        return conn
+
+    def test_connection(self) -> tuple[bool, str]:
+        """Test the connection by sending an access_info request"""
+        conn = self.get_conn()
+        conn.access_info()
+        return True, "Connection successfully tested"
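Typical use of the hook, assuming a connection with id ``uc_default`` exists in the Airflow connections backend (host carrying the UNICORE/X base URL, login/password the credentials):

    from airflow_unicore_integration.hooks.unicore_hooks import UnicoreHook

    hook = UnicoreHook(uc_conn_id="uc_default")
    client = hook.get_conn()       # pyunicore.client.Client
    print(client.access_info())    # the same call test_connection() issues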
airflow_unicore_integration-0.1.1/src/airflow_unicore_integration/operators/__init__.py ADDED
File without changes (empty file)
{airflow_unicore_integration-0.0.5 → airflow_unicore_integration-0.1.1}/src/airflow_unicore_integration/operators/unicore_operators.py

@@ -1,38 +1,73 @@
-…
-from …
-from typing import …
-
-from …
+import logging
+from typing import Any
+from typing import Dict
+from typing import List
+from typing import Sequence
 
 import pyunicore.client as uc_client
 import pyunicore.credentials as uc_credentials
-from …
+from airflow.models.baseoperator import BaseOperator
+from airflow.utils.context import Context
 
-import …
+from airflow_unicore_integration.hooks import unicore_hooks
 
 logger = logging.getLogger(__name__)
 
-DEFAULT_SCRIPT_NAME = …
-DEFAULT_BSS_FILE = …
+DEFAULT_SCRIPT_NAME = "default_script_from_job_description"
+DEFAULT_BSS_FILE = "default_bss_file_upload"
+
 
 class JobDescriptionException(BaseException):
-    """…
+    """For errors in the unicore job description that would be generated by the unicore operators."""
+
     def __init__(self, *args: object) -> None:
         super().__init__(*args)
 
+
 class UnicoreGenericOperator(BaseOperator):
 
-    def __init__(…
-        …
-        …
-        …
-        …
-        …
-        …
+    def __init__(
+        self,
+        name: str,
+        application_name: str | None = None,
+        application_version: str | None = None,
+        executable: str | None = None,
+        arguments: List[str] | None = None,
+        environment: List[str] | None = None,
+        parameters: Dict[str, str | List[str]] | None = None,
+        stdout: str | None = None,
+        stderr: str | None = None,
+        stdin: str | None = None,
+        ignore_non_zero_exit_code: bool | None = None,
+        user_pre_command: str | None = None,
+        run_user_pre_command_on_login_node: bool | None = None,
+        user_pre_command_ignore_non_zero_exit_code: bool | None = None,
+        user_post_command: str | None = None,
+        run_user_post_command_on_login_node: bool | None = None,
+        user_post_command_ignore_non_zero_exit_code: bool | None = None,
+        resources: Dict[str, str] | None = None,
+        project: str | None = None,
+        imports: List[Dict[str, str | Sequence[str]]] | None = None,
+        exports: List[Dict[str, str | List[str]]] | None = None,
+        have_client_stagein: bool | None = None,
+        job_type: str | None = None,
+        login_node: str | None = None,
+        bss_file: str | None = None,
+        tags: List[str] | None = None,
+        notification: str | None = None,
+        user_email: str | None = None,
+        xcom_output_files: List[str] = ["stdout", "stderr"],
+        base_url: str | None = None,
+        credential: uc_credentials.Credential | None = None,
+        credential_username: str | None = None,
+        credential_password: str | None = None,
+        credential_token: str | None = None,
+        **kwargs,
+    ):
         """
-        Initialize a Unicore Job Operator.
-        :param name: The name parameter defines both the airflow task name and the unicore job name.
-        :param xcom_output_files: A list of filenames to be exported from the job directory via x_coms. By default stdout and stderr.
+        Initialize a Unicore Job Operator.
+        :param name: The name parameter defines both the airflow task name and the unicore job name.
+        :param xcom_output_files: A list of filenames to be exported from the job directory via x_coms. By default stdout and stderr.
         :param base_url: The base URL of the UNICOREX server to be used for the unicore client. Overwrites the configured airflow conenction.
         :param credential: A unicore Credential to be used for the unicore client. Overwrites the configured user-pass in the airflow connection.
         :param credential_username: Username for the unicore client credentials. Overwrites the configured user in the airflow connection.

@@ -58,7 +93,9 @@ class UnicoreGenericOperator(BaseOperator):
         self.user_pre_command_ignore_non_zero_exit_code = user_pre_command_ignore_non_zero_exit_code
         self.user_post_command = user_post_command
         self.run_user_post_command_on_login_node = run_user_post_command_on_login_node
-        self.user_post_command_ignore_non_zero_exit_code = …
+        self.user_post_command_ignore_non_zero_exit_code = (
+            user_post_command_ignore_non_zero_exit_code
+        )
         self.resources = resources
         self.project = project
         self.imports = imports

@@ -85,54 +122,61 @@ class UnicoreGenericOperator(BaseOperator):
         """
         Does some basic validation and checks if a proper job description can be generated. Raises a JobDescriptionException if not.
         """
-        # check for some errors in the parameters for creating the unicore job
+        # check for some errors in the parameters for creating the unicore job
 
         # first check if application or executable been set
-        if …
+        if (
+            self.application_name is None and self.executable is None
+        ):  # TODO check if executable is required if bss_file is given
             raise JobDescriptionException
-
+
         # if bss_file is set, we need an executable
         if self.bss_file is not None:
             if self.executable is None and self.application_name is not None:
                 raise JobDescriptionException
             # TODO validate BSS file?
             logger.info("using bss file")
-
+
         if self.credential_token is not None and self.credential is None:
             logger.info("using provided oidc token")
             self.credential = uc_credentials.OIDCToken(token=self.credential_token)
 
-        if …
+        if (
+            self.credential_username is not None
+            and self.credential_password is not None
+            and self.credential is None
+        ):
             logger.info("using provied user/pass credentials")
-            self.credential = uc_credentials.UsernamePassword(
-                …
-                …
-                …
+            self.credential = uc_credentials.UsernamePassword(
+                username=self.credential_username, password=self.credential_password
+            )
+
+    def get_job_description(self) -> dict[str, Any]:
         """Generates the job description to be submitted to the unicore server."""
         job_description_dict: Dict = {}
 
         # now add the various simple string attribute fragments to the list, when they are not None
         if self.name is not None:
             job_description_dict["Name"] = self.name
-
+
         if self.application_name is not None:
             job_description_dict["ApplicationName"] = self.application_name
-
+
         if self.application_version is not None:
             job_description_dict["ApplicationVersion"] = self.application_version
-
+
         if self.executable is not None:
             job_description_dict["Executable"] = self.executable
 
         if self.arguments is not None:
             job_description_dict["Arguments"] = self.arguments
-
+
         if self.environment is not None:
             job_description_dict["Environment"] = self.environment
 
         if self.parameters is not None:
             job_description_dict["Parameters"] = self.parameters
-
+
         if self.stdout is not None:
             job_description_dict["Stdout"] = self.stdout
 

@@ -144,24 +188,32 @@ class UnicoreGenericOperator(BaseOperator):
 
         if self.ignore_non_zero_exit_code is not None:
             job_description_dict["IgnoreNonZeroExitCode"] = self.ignore_non_zero_exit_code
-
+
         if self.user_pre_command is not None:
             job_description_dict["User precommand"] = self.user_pre_command
 
         if self.run_user_pre_command_on_login_node is not None:
-            job_description_dict["RunUserPrecommandOnLoginNode"] = …
-
+            job_description_dict["RunUserPrecommandOnLoginNode"] = (
+                self.run_user_pre_command_on_login_node
+            )
+
         if self.user_pre_command_ignore_non_zero_exit_code is not None:
-            job_description_dict["UserPrecommandIgnoreNonZeroExitCode"] = …
+            job_description_dict["UserPrecommandIgnoreNonZeroExitCode"] = (
+                self.user_pre_command_ignore_non_zero_exit_code
+            )
 
         if self.user_post_command is not None:
             job_description_dict["User postcommand"] = self.user_post_command
 
         if self.run_user_post_command_on_login_node is not None:
-            job_description_dict["RunUserPostcommandOnLoginNode"] = …
+            job_description_dict["RunUserPostcommandOnLoginNode"] = (
+                self.run_user_post_command_on_login_node
+            )
 
         if self.user_post_command_ignore_non_zero_exit_code is not None:
-            job_description_dict["UserPostcommandIgnoreNonZeroExitCode"] = …
+            job_description_dict["UserPostcommandIgnoreNonZeroExitCode"] = (
+                self.user_post_command_ignore_non_zero_exit_code
+            )
 
         if self.resources is not None:
             job_description_dict["Resources"] = self.resources

@@ -171,12 +223,12 @@ class UnicoreGenericOperator(BaseOperator):
 
         if self.imports is not None:
             job_description_dict["Imports"] = self.imports
-
+
         if self.exports is not None:
             job_description_dict["Exports"] = self.exports
 
         if self.have_client_stagein is not None:
-            job_description_dict["haveClientStageIn"] =self.have_client_stagein
+            job_description_dict["haveClientStageIn"] = self.have_client_stagein
 
         if self.job_type is not None:
             job_description_dict["Job type"] = self.job_type

@@ -197,7 +249,7 @@ class UnicoreGenericOperator(BaseOperator):
             job_description_dict["Tags"] = self.tags
 
         return job_description_dict
-
+
     def get_uc_client(self, uc_conn_id: str | None = None) -> uc_client.Client:
         """Return a unicore client for submitting the job. Will use provided base_url and credentials if possible, else it will use the default unicore connection from airflow."""
         if self.base_url is not None and self.credential is not None:

@@ -207,7 +259,7 @@ class UnicoreGenericOperator(BaseOperator):
         else:
             hook = unicore_hooks.UnicoreHook(uc_conn_id=uc_conn_id)
         return hook.get_conn(overwrite_base_url=self.base_url, overwrite_credential=self.credential)
-
+
     def execute_async(self, context: Context) -> Any:
         """Submits the job and returns the job object without waiting for it to finish."""
         client = self.get_uc_client()

@@ -216,35 +268,41 @@ class UnicoreGenericOperator(BaseOperator):
 
     def execute(self, context: Context) -> Any:
         """
-        Submits the job description to the unicore server.
+        Submits the job description to the unicore server.
         After the job is finished (failed or successful), some basic output such as logs, status messages, job id or file content will be retreived and returned via x_coms or airflow logs.
         The exit code of this functions is the exit code of the unicore job, to ensure proper handling of failed or aborted jobs.
         """
-        import logging
-
+        import logging
+
+        from pyunicore.client import Job
+        from pyunicore.client import JobStatus
+
         logger = logging.getLogger(__name__)
-
-        job: Job = self.execute_async(…
+
+        job: Job = self.execute_async(
+            context
+        )  # TODO depending on params this may spawn multiple jobs -> in those cases, e.g. output needs to be handled differently
         logger.debug(f"Waiting for unicore job {job}")
-        job.poll()
+        job.poll()  # wait for job to finish
 
-        task_instance = context[…
+        task_instance = context["task_instance"]
 
-
         task_instance.xcom_push(key="status_message", value=job.properties["statusMessage"])
         task_instance.xcom_push(key="log", value=job.properties["log"])
         for line in job.properties["log"]:
             logger.info(f"UNICORE LOGS: {line}")
-
+
         if job.status is not JobStatus.SUCCESSFUL:
             from airflow.exceptions import AirflowFailException
-            logger.error(f"Unicore job not successful. Job state is {job.status}. Aborting this task.")
-            raise AirflowFailException
 
+            logger.error(
+                f"Unicore job not successful. Job state is {job.status}. Aborting this task."
+            )
+            raise AirflowFailException
 
         work_dir = job.working_dir
 
-        content = work_dir.contents()[…
+        content = work_dir.contents()["content"]
         task_instance.xcom_push(key="workdir_content", value=content)
 
         for filename in content.keys():

@@ -257,14 +315,15 @@ class UnicoreGenericOperator(BaseOperator):
             bss_submit_text = work_dir.stat(filename).raw().read().decode("utf-8")
             task_instance.xcom_push(key="BSS_SUBMIT", value=bss_submit_text)
             continue
-
+
         from requests.exceptions import HTTPError
+
         for file in self.xcom_output_files:
             try:
                 logger.debug(f"Retreiving file {file} from workdir.")
                 remote_file = work_dir.stat(file)
                 content = remote_file.raw().read().decode("utf-8")
-                task_instance.xcom_push(key=file,value=content)
+                task_instance.xcom_push(key=file, value=content)
             except HTTPError as http_error:
                 logger.error(f"Error while retreiving file {file} from workdir.", http_error)
                 continue

@@ -274,51 +333,63 @@ class UnicoreGenericOperator(BaseOperator):
         exit_code = job.properties["exitCode"]
         return exit_code
 
+
 class UnicoreScriptOperator(UnicoreGenericOperator):
     """
     This operator uplaods and submits a script to the unicore server as a job. Behaviour and parameters are otherwise the same as the UnicoreBaseOpertor.
     """
+
     def __init__(self, name: str, script_content: str, **kwargs):
         """
         :param script_content: the content of the script to be executed as a string
         """
         super().__init__(name=name, executable=DEFAULT_SCRIPT_NAME, **kwargs)
-        lines = script_content.split(…
-        script_stagein = {
-            "To": DEFAULT_SCRIPT_NAME,
-            "Data": lines
-        }
+        lines = script_content.split("\n")
+        script_stagein = {"To": DEFAULT_SCRIPT_NAME, "Data": lines}
         if self.imports is not None:
             self.imports.append(script_stagein)
         else:
             self.imports = [script_stagein]
 
+
 class UnicoreBSSOperator(UnicoreGenericOperator):
     """
     This operator uplaods and submits a BSS-script to the unicore server as a job. Behaviour and parameters are otherwise the same as the UnicoreBaseOpertor.
     """
+
     def __init__(self, name: str, bss_file_content: str, **kwargs):
         """
         :param bss_file_content: the content of the BSS-script to be executed as a string
         """
-        super().__init__(name=name, …
-        lines = bss_file_content.split(…
-        bss_stagein = {
-            "To": DEFAULT_BSS_FILE,
-            "Data": lines
-        }
+        super().__init__(name=name, **kwargs)
+        lines = bss_file_content.split("\n")
+        bss_stagein = {"To": DEFAULT_BSS_FILE, "Data": lines}
         if self.imports is not None:
             self.imports.append(bss_stagein)
         else:
             self.imports = [bss_stagein]
 
+
 class UnicoreExecutableOperator(UnicoreGenericOperator):
     """A simplified operator for just executing an executable. Still supports all parameters fo the UnicoreBaseOperator."""
-    …
-    …
+
+    def __init__(
+        self,
+        name: str,
+        executable: str,
+        xcom_output_files: List[str] = ["stdout", "stderr"],
+        **kwargs,
+    ) -> None:
+        super().__init__(
+            name=name,
+            executable=executable,
+            xcom_output_files=xcom_output_files,
+            **kwargs,
+        )
+
 
 class UnicoreDateOperator(UnicoreExecutableOperator):
     """An operator for executing the 'date' executable. Mostly for testing purposes. Still supports all parameters fo the UnicoreBaseOperator."""
-    def __init__(self, name: str, **kwargs) -> None:
-        super().__init__(name=name, executable='date',**kwargs)
 
+    def __init__(self, name: str, **kwargs) -> None:
+        super().__init__(name=name, executable="date", **kwargs)
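A short DAG sketch using the operators above (dag id and script body are illustrative; per the docstring, ``name`` doubles as the airflow task name):

    from airflow import DAG  # Airflow 3 also exposes this via airflow.sdk

    from airflow_unicore_integration.operators.unicore_operators import (
        UnicoreDateOperator,
        UnicoreScriptOperator,
    )

    with DAG(dag_id="unicore_example") as dag:
        date = UnicoreDateOperator(name="print_date")
        script = UnicoreScriptOperator(
            name="hello_script",
            script_content="#!/bin/bash\necho hello from unicore",
        )
        date >> script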
airflow_unicore_integration-0.1.1/src/airflow_unicore_integration/policies/__init__.py ADDED
File without changes (empty file)
airflow_unicore_integration-0.1.1/src/airflow_unicore_integration/util/job.py ADDED

@@ -0,0 +1,101 @@
+from typing import Any
+from typing import Dict
+
+from airflow.configuration import conf
+from airflow.executors.workloads import ExecuteTask
+from airflow.models.taskinstancekey import TaskInstanceKey
+
+
+class JobDescriptionGenerator:
+    """
+    A generator class for generating unicore jhob descriptions that may supprot different kinds of systems and/ or environments.
+    """
+
+    EXECUTOR_CONFIG_PYTHON_ENV_KEY = "python_env"  # full path to a python virtualenv that includes airflow and all required libraries for the task (without the .../bin/activate part)
+    EXECUTOR_CONFIG_RESOURCES = "Resources"  # gets added to the unicore job description
+    EXECUTOR_CONFIG_ENVIRONMENT = "Environment"  # gets added to the unicore job description
+    EXECUTOR_CONFIG_PARAMETERS = "Parameters"  # gets added to the unicore job description
+    EXECUTOR_CONFIG_PROJECT = "Project"  # gets added to the unicore job description
+    EXECUTOR_CONFIG_PRE_COMMANDS = "precommands"  # gets added to the unicore job description
+    EXECUTOR_CONFIG_UNICORE_CONN_KEY = (
+        "unicore_connection_id"  # alternative connection id for the Unicore connection to use
+    )
+    EXECUTOR_CONFIG_UNICORE_SITE_KEY = "unicore_site"  # alternative Unicore site to run at, only required if different than connection default
+    EXECUTOR_CONFIG_UNICORE_CREDENTIAL_KEY = "unicore_credential"  # alternative unicore credential to use for the job, only required if different than connection default
+
+    def create_job_description(self, workload: ExecuteTask) -> Dict[str, Any]:
+        raise NotImplementedError()
+
+
+class NaiveJobDescriptionGenerator(JobDescriptionGenerator):
+    """
+    This class generates a naive unicore job, that expects there to be a working python env containign airflow and any other required dependencies on the executing system.
+    """
+
+    def create_job_description(self, workload: ExecuteTask) -> Dict[str, Any]:
+        key: TaskInstanceKey = workload.ti.key
+        executor_config = workload.ti.executor_config
+        if not executor_config:
+            executor_config = {}
+        job_descr_dict: Dict[str, Any] = {}
+        # get user config from executor_config
+        user_added_env: Dict[str, str] = executor_config.get(JobDescriptionGenerator.EXECUTOR_CONFIG_ENVIRONMENT, None)  # type: ignore
+        user_added_params: Dict[str, str] = executor_config.get(JobDescriptionGenerator.EXECUTOR_CONFIG_PARAMETERS, None)  # type: ignore
+        user_added_project: str = executor_config.get(JobDescriptionGenerator.EXECUTOR_CONFIG_PROJECT, None)  # type: ignore
+        user_added_resources: Dict[str, str] = executor_config.get(JobDescriptionGenerator.EXECUTOR_CONFIG_RESOURCES, None)  # type: ignore
+        user_added_pre_commands: list[str] = executor_config.get(JobDescriptionGenerator.EXECUTOR_CONFIG_PRE_COMMANDS, [])  # type: ignore
+        user_defined_python_env: str = workload.ti.executor_config.get(JobDescriptionGenerator.EXECUTOR_CONFIG_PYTHON_ENV_KEY, None)  # type: ignore
+        # get local dag path from cmd and fix dag path in arguments
+        dag_rel_path = str(workload.dag_rel_path)
+        if dag_rel_path.startswith("DAG_FOLDER"):
+            dag_rel_path = dag_rel_path[10:]
+        # local_dag_path = conf.get("core", "DAGS_FOLDER") + "/" + dag_rel_path
+        base_url = conf.get("api", "base_url", fallback="/")
+        default_execution_api_server = f"{base_url.rstrip('/')}/execution/"
+        server = conf.get("core", "execution_api_server_url", fallback=default_execution_api_server)
+
+        # check which python virtualenv to use
+        if user_defined_python_env:
+            python_env = user_defined_python_env
+        else:
+            python_env = conf.get("unicore.executor", "DEFAULT_ENV")
+        # prepare dag file to be uploaded via unicore
+        # dag_file = open(local_dag_path)
+        # dag_content = dag_file.readlines()
+        # dag_import = {"To": dag_rel_path, "Data": dag_content}
+        worker_script_import = {
+            "To": "run_task_via_supervisor.py",
+            "From": "https://gist.githubusercontent.com/cboettcher/3f1101a1d1b67e7944d17c02ecd69930/raw/6da9ec16ba598ddda9cf288900498fab5e226788/run_task_via_supervisor.py",
+        }
+        # start filling the actual job description
+        job_descr_dict["Name"] = f"{key.dag_id} - {key.task_id} - {key.run_id} - {key.try_number}"
+        job_descr_dict["Executable"] = (
+            "python"  # TODO may require module load to be setup for some systems
+        )
+        job_descr_dict["Arguments"] = [
+            "run_task_via_supervisor.py",
+            f"--json-string '{workload.model_dump_json()}'",
+        ]
+        job_descr_dict["Environment"] = {
+            "AIRFLOW__CORE__EXECUTION_API_SERVER_URL": server,
+            "AIRFLOW__CORE__DAGS_FOLDER": "./",
+            "AIRFLOW__LOGGING__LOGGING_LEVEL": "DEBUG",
+            "AIRFLOW__CORE__EXECUTOR": "LocalExecutor,airflow_unicore_integration.executors.unicore_executor.UnicoreExecutor",
+        }
+        user_added_pre_commands.append(f"source {python_env}/bin/activate")
+        job_descr_dict["User precommand"] = ";".join(user_added_pre_commands)
+        job_descr_dict["RunUserPrecommandOnLoginNode"] = (
+            "false"  # precommand is activating the python env, this can also be done on compute node right before running the job
+        )
+        job_descr_dict["Imports"] = [worker_script_import]
+        # add user defined options to description
+        if user_added_env:
+            job_descr_dict["Environment"].update(user_added_env)
+        if user_added_params:
+            job_descr_dict["Parameters"] = user_added_params
+        if user_added_project:
+            job_descr_dict["Project"] = user_added_project
+        if user_added_resources:
+            job_descr_dict["Resources"] = user_added_resources
+
+        return job_descr_dict
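A sketch of the per-task ``executor_config`` this generator consumes; the key names come from the ``EXECUTOR_CONFIG_*`` constants above, the values are placeholders:

    executor_config = {
        "python_env": "/p/project/myproj/airflow-venv",  # EXECUTOR_CONFIG_PYTHON_ENV_KEY
        "Resources": {"Queue": "batch", "Nodes": "1"},   # copied into the job description
        "Environment": {"MY_FLAG": "1"},                 # merged into the generated Environment
        "precommands": ["module load Python"],           # run before the venv activation
        "Project": "myproj",
    }
    # attach to any task, e.g.: SomeOperator(task_id="t", executor_config=executor_config)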
airflow_unicore_integration-0.0.5/README.rst → airflow_unicore_integration-0.1.1/src/airflow_unicore_integration.egg-info/PKG-INFO

@@ -1,8 +1,39 @@
+Metadata-Version: 2.4
+Name: airflow-unicore-integration
+Version: 0.1.1
+Summary: Running Unicore Jobs from airflow DAGs.
+Author-email: Christian Böttcher <c.boettcher@fz-juelich.de>
+License-Expression: BSD-3-Clause
+Project-URL: Homepage, https://github.com/UNICORE-EU/airflow-unicore-integration
+Project-URL: Issues, https://github.com/UNICORE-EU/airflow-unicore-integration/issues
+Classifier: Development Status :: 4 - Beta
+Classifier: Framework :: Apache Airflow :: Provider
+Classifier: Programming Language :: Python :: 3
+Classifier: Operating System :: OS Independent
+Requires-Python: >=3.9
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+Requires-Dist: pyunicore>=1.0.0
+Requires-Dist: apache-airflow>=3.0.0
+Dynamic: license-file
+
 ===========================
 Unicore Airflow Integration
 ===========================
 
 
+|Generic badge|
+
+.. |Generic badge| image:: https://github.com/UNICORE-EU/airflow-unicore-integration/actions/workflows/publish-to-pypi.yml/badge.svg
+   :target: https://github.com/UNICORE-EU/airflow-unicore-integration/actions/workflows/publish-to-pypi.yml
+
+This project integrates `UNICORE <https://github.com/UNICORE-EU>`_ and `Apache Airflow <https://airflow.apache.org/>`_.
+UNICORE is a software suite that, among other functions, provides seamless access to high-performance compute and data resources.
+Airflow is a platform to programmatically author, schedule and monitor workflows.
+
+In the current state, this projects provides a set of airflow `operators <https://airflow.apache.org/docs/apache-airflow/stable/core-concepts/operators.html>`_, which can be used as part of airflow workflows to submit jobs to Unicore.
+The UnicoreExecutor only offers experimental support for airflow 3 so far. Further support is currently being worked on.
+
 ---------------------------
 Using the Unicore Operators
 ---------------------------

@@ -29,7 +60,7 @@ exports List(exports) None
 For imports and exports go `here <https://unicore-docs.readthedocs.io/en/latest/user-docs/rest-api/job-description/index.html#importing-files-into-the-job-workspace>`_ for details.
 
 
-The ``UnicoreGenericOperator`` supports the following additional parameters:
+The ``UnicoreGenericOperator`` supports the following additional parameters:
 
 ======================= ======================= =========================================== ====================
 parameter name          type                    default                                     description

@@ -80,7 +111,7 @@ Behaviour on Errors and Success
 The Unicore Operators do not do a lot of error and exception handling, and mostly just forward any problems to be handled by airflow.
 All of the Unicore logic is handled by the `pyunicore library <https://github.com/HumanBrainProject/pyunicore>`_.
 
-While some validation of the resulting Unicore job description is done automatically, it may still be possible to build an invalid job description with the operators.
+While some validation of the resulting Unicore job description is done automatically, it may still be possible to build an invalid job description with the operators.
 This may lead to a submission failure with Unicore. In this case, an exception is thrown to be handled by airflow.
 
 

@@ -110,10 +141,10 @@ There are some example DAGs in this repository under ``project-dir/dags``.
 - ``unicore-test-2.py`` has some basic examples for the generic operator.
 - ``unicore-test-3.py`` also includes script-operator examples.
 - ``unicore-test-4.py`` has some examples with more arguments.
-- ``unicore-test-bss.py`` shows how bss submission can be done (very simple example).
-- ``unicore-test-credentials.py`` demonstrates that not only the credentials from the airflow connections backend can be used, but they can also be provided in the constructor of the
-- ``unicore-test-import-export.py`` gives
-
+- ``unicore-test-bss.py`` shows how bss submission can be done (very simple example).
+- ``unicore-test-credentials.py`` demonstrates that not only the credentials from the airflow connections backend can be used, but they can also be provided in the constructor of the operator.
+- ``unicore-test-import-export.py`` gives short examples for the imports and exports usage.
+
 
 -----------------
 Setup testing env

@@ -135,4 +166,4 @@ The ``run-testing-env.sh`` script supports the commands up, down, start, stop, p
 Install package via pip
 -----------------------
 
-``pip install airflow-unicore-integration``
+``pip install airflow-unicore-integration``
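After ``pip install airflow-unicore-integration``, provider registration can be sanity-checked like this (a sketch; the attribute names follow the ProvidersManager API as commonly documented, so treat them as assumptions):

    from airflow.providers_manager import ProvidersManager

    pm = ProvidersManager()
    print("airflow-unicore-integration" in pm.providers)  # provider package discovered
    print(pm.hooks.get("unicore"))                        # hook info for the "unicore" connection type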
{airflow_unicore_integration-0.0.5 → airflow_unicore_integration-0.1.1}/src/airflow_unicore_integration.egg-info/SOURCES.txt

@@ -8,7 +8,12 @@ src/airflow_unicore_integration.egg-info/dependency_links.txt
 src/airflow_unicore_integration.egg-info/entry_points.txt
 src/airflow_unicore_integration.egg-info/requires.txt
 src/airflow_unicore_integration.egg-info/top_level.txt
+src/airflow_unicore_integration/executors/__init__.py
+src/airflow_unicore_integration/executors/run_task_via_supervisor.py
+src/airflow_unicore_integration/executors/unicore_executor.py
 src/airflow_unicore_integration/hooks/__init__.py
 src/airflow_unicore_integration/hooks/unicore_hooks.py
 src/airflow_unicore_integration/operators/__init__.py
-src/airflow_unicore_integration/operators/unicore_operators.py
+src/airflow_unicore_integration/operators/unicore_operators.py
+src/airflow_unicore_integration/policies/__init__.py
+src/airflow_unicore_integration/util/job.py
airflow_unicore_integration-0.0.5/src/airflow_unicore_integration/hooks/unicore_hooks.py REMOVED

@@ -1,49 +0,0 @@
-from __future__ import annotations
-
-from pyunicore import client, credentials
-
-from airflow.hooks.base import BaseHook
-
-
-class UnicoreHook(BaseHook):
-    """
-    Interact with Unicore.
-
-    Creates Unicore Clients from airflow connections.
-
-    :param uc_conn_id: The unicore connection id - default: uc_default
-    """
-
-    conn_name_attr = "uc_conn_id"
-    default_conn_name = "uc_default"
-    conn_type = "unicore"
-    hook_name = "Unicore"
-
-    def __init__(self, uc_conn_id: str = default_conn_name) -> None:
-        super().__init__()
-        self.uc_conn_id = uc_conn_id
-        self.conn = None
-
-    def get_conn(self, overwrite_base_url: str | None = None, overwrite_credential : credentials.Credential | None = None) -> client.Client:
-        """Return a Unicore Client. base_url and credentials may be overwritten."""
-        if self.conn is None or overwrite_base_url is not None or overwrite_credential is not None:  # if not generated, or overwrite attribute is set crete new
-            params = self.get_connection(self.uc_conn_id)
-            base_url = params.host
-            credential = credentials.UsernamePassword(params.login, params.password)
-            if overwrite_base_url is not None:
-                base_url = overwrite_base_url
-            if overwrite_credential is not None:
-                credential = overwrite_credential
-            self.conn = client.Client(credential, base_url)
-
-        return self.conn
-
-
-    def test_connection(self) -> tuple[bool, str]:
-        """Test the connection by sending an access_info request"""
-        try:
-            conn = self.get_conn()
-            conn.access_info()
-            return True, "Connection successfully tested"
-        except Exception as e:
-            return False, str(e)

Files without changes:
- {airflow_unicore_integration-0.0.5 → airflow_unicore_integration-0.1.1}/setup.cfg
- {airflow_unicore_integration-0.0.5/src/airflow_unicore_integration/hooks → airflow_unicore_integration-0.1.1/src/airflow_unicore_integration/executors}/__init__.py
- {airflow_unicore_integration-0.0.5/src/airflow_unicore_integration/operators → airflow_unicore_integration-0.1.1/src/airflow_unicore_integration/hooks}/__init__.py
- {airflow_unicore_integration-0.0.5 → airflow_unicore_integration-0.1.1}/src/airflow_unicore_integration.egg-info/dependency_links.txt
- {airflow_unicore_integration-0.0.5 → airflow_unicore_integration-0.1.1}/src/airflow_unicore_integration.egg-info/top_level.txt